Galfour 2019-03-21 14:53:09 +00:00
parent 8819422542
commit 0975f71059
29 changed files with 2762 additions and 1628 deletions

src/ligo/.old.transpiler.ml (new file, 196 lines)

@ -0,0 +1,196 @@
open Mini_c
module AST = Ligo_parser.Typed.O
module SMap = Ligo_parser.Typed.SMap
module Rename = struct
open! AST
let rec rename_expr_case (src:string) (dst:string) : expr_case -> expr_case = function
| App {operator;arguments} -> App {operator = rename_operator src dst operator ; arguments = rename_exprs src dst arguments}
| Var n when n.name.name = src -> Var {n with name = {n.name with name = dst}}
| Var n -> Var n
| Constant c -> Constant c
| Record r -> Record (List.map (fun (key, expr) -> key, rename_expr src dst expr) r)
| Lambda {parameter} as l when parameter.name.name = src -> l
| Lambda ({instructions;declarations} as l) ->
Lambda {l with instructions = rename_instrs src dst instructions ; declarations = rename_declarations src dst declarations}
and rename_expr (src:string) (dst:string) (e : expr) : expr =
{ e with expr = rename_expr_case src dst e.expr }
and rename_exprs src dst exprs = List.map (rename_expr src dst) exprs
and rename_operator_case (src:string) (dst:string) : operator_case -> operator_case = function
| Function n when n.name = src -> Function {n with name = dst}
| x -> x
and rename_operator src dst (o:operator) : operator = {o with operator = rename_operator_case src dst o.operator}
and rename_var src dst (v:var_name) : var_name =
if v.name = src
then {v with name = dst}
else v
and rename_instr (src:string) (dst:string) : instr -> instr = function
| Assignment {name;value;orig} when name.name = src -> Assignment {name = {name with name = dst};value;orig}
| Assignment {name;value;orig} -> Assignment {value = rename_expr src dst value;name;orig}
| While {condition;body;orig} -> While {condition = rename_expr src dst condition;body=rename_instrs src dst body;orig}
| ForCollection {list;var;body;orig} -> ForCollection {list = rename_expr src dst list;var = rename_var src dst var;
body = rename_instrs src dst body;orig}
| Match ({expr;cases} as a) -> Match {a with expr = rename_expr src dst expr ; cases = rename_match_cases src dst cases}
| ProcedureCall {expr;orig} -> ProcedureCall {expr = rename_expr src dst expr;orig}
| Fail {expr;orig} -> Fail {expr = rename_expr src dst expr;orig}
and rename_instrs src dst : instr list -> instr list = List.map (rename_instr src dst)
and rename_match_cases (src:string) (dst:string) (m:(_ * instr list) list) =
List.map (fun (x, y) -> x, rename_instrs src dst y) m
and rename_declaration (src:string) (dst:string) ({var} as d: decl) : decl =
if var.name.name = src
then {d with var = {var with name = {var.name with name = dst}}}
else d
and rename_declarations (src:string) (dst:string) (decls:decl list) =
List.map (rename_declaration src dst) decls
end
let list_of_map m = List.rev @@ SMap.fold (fun _ v prev -> v :: prev) m []
let rec translate_type : AST.type_expr -> type_value result = fun {type_expr} ->
match type_expr with
| Unit -> ok (`Base Unit)
| Int -> ok (`Base Int)
| String -> ok (`Base String)
| Bool -> ok (`Base Bool)
| Sum m ->
let node = Append_tree.of_list @@ List.map snd @@ list_of_map m in
let aux a b : type_value result =
let%bind a = a in
let%bind b = b in
ok (`Or (a, b))
in
Append_tree.fold_ne translate_type aux node
| Record r ->
let node = Append_tree.of_list @@ List.map snd @@ list_of_map r in
let aux a b : type_value result =
let%bind a = a in
let%bind b = b in
ok (`Pair (a, b))
in
Append_tree.fold_ne translate_type aux node
| Ref t -> translate_type t
| Function {arg;ret} ->
let%bind arg = translate_type arg in
let%bind ret = translate_type ret in
ok (`Function(arg, ret))
| TypeApp _ -> simple_fail "No type application"
let translate_constant : AST.constant -> value result = function
| Unit -> ok `Unit
| String s -> ok (`String s)
| Int n -> ok (`Int (Z.to_int n))
| False -> ok (`Bool false)
| True -> ok (`Bool true)
| _ -> simple_fail ""
let rec translate_lambda : AST.lambda -> anon_function result =
fun {declarations;parameter;instructions;result} ->
let ({name;ty}:AST.typed_var) = parameter in
let%bind input_ty = translate_type ty in
let%bind output_ty = translate_type result.ty in
let%bind result = translate_expr result in
let%bind (declaration_statements : statement list) = translate_declarations declarations in
let%bind (instruction_statements : statement list) = translate_instructions instructions in
let body = declaration_statements @ instruction_statements in
ok {content={binder=name.name;input=input_ty;output=output_ty;body;result} ; capture = No_capture}
and translate_expr' : AST.expr_case -> expression' result = function
| Var {name} -> ok (Var name.name)
| Constant cst ->
let%bind value = translate_constant cst in
ok (Literal value)
| Lambda _ -> simple_fail "Mini_c doesn't deal with lambda in expressions yet"
| _ -> simple_fail ""
and translate_expr env : AST.expr -> expression result = fun {expr;ty} ->
let%bind expr = translate_expr' expr in
let%bind ty = translate_type ty in
ok (expr, ty, env)
and translate_declaration : AST.decl -> statement result = fun {var;value} ->
let%bind expr = translate_expr value in
ok (Assignment(Variable(var.name.name, expr)))
and translate_declarations : AST.decl list -> statement list result = fun declarations ->
bind_list @@ List.map translate_declaration declarations
and translate_match (expr:AST.expr) (cases: (AST.pattern * AST.instr list) list) : statement result =
match cases with
| [(AST.PTrue, instrs_true) ; (AST.PFalse, instrs_false) ] ->
let%bind cond = translate_expr expr in
let%bind b_true = translate_instructions instrs_true in
let%bind b_false = translate_instructions instrs_false in
ok (Cond (cond, b_true, b_false))
| [(AST.PFalse, instrs_false) ; (AST.PTrue, instrs_true) ] ->
let%bind cond = translate_expr expr in
let%bind b_true = translate_instructions instrs_true in
let%bind b_false = translate_instructions instrs_false in
ok (Cond (cond, b_true, b_false))
| _ -> simple_fail "unrecognized pattern"
and translate_instruction : AST.instr -> statement result = function
| Assignment {name ; value} ->
let%bind expr = translate_expr value in
ok (Assignment (Variable(name.name, expr)))
| While {condition ; body} ->
let%bind block = translate_instructions body in
let%bind cond = translate_expr condition in
ok (While (cond, block))
| ForCollection _ -> simple_fail "We don't deal with for collection yet"
| Match {expr;cases} -> translate_match expr cases
| Fail _ -> simple_fail "Fail have to be added in Mini_C"
| ProcedureCall _ -> simple_fail "Drop Unit have to be added in Mini_C"
and translate_instructions : AST.instr list -> statement list result = fun instrs ->
bind_list @@ List.map translate_instruction instrs
let translate_program : AST.ast -> block result = fun {declarations} ->
translate_declarations declarations
let rec to_mini_c_value' : (AST.expr_case * AST.type_expr) -> value result = function
| Constant c, _ -> translate_constant c
| App {arguments;operator = {operator = Constructor c ; ty = {type_expr = Sum lst}}}, _ ->
let node = Append_tree.of_list @@ List.map fst @@ list_of_map lst in
let%bind lst =
trace_option (simple_error "Not constructor of variant type") @@
Append_tree.exists_path (fun (x:AST.name_and_region) -> x.name = c.name) node in
let arg = List.hd arguments in
let%bind arg = to_mini_c_value arg in
let ors = List.fold_left (fun b a -> if a then `Right b else `Left b) arg (List.rev lst) in
ok ors
| App _, _ -> simple_fail "Applications aren't value"
| Record lst, _ ->
let node = Append_tree.of_list @@ List.map snd lst in
let aux a b =
let%bind a = a in
let%bind b = b in
ok (`Pair (a, b))
in
Append_tree.fold_ne to_mini_c_value aux node
| Lambda _, _-> simple_fail "Lambda aren't value yet"
| Var _, _-> simple_fail "Var aren't value yet"
and to_mini_c_value : AST.expr -> value result = fun {expr;ty} ->
to_mini_c_value' (expr, ty)
let ghost expr ty : AST.expr = {expr;ty;orig=`TODO}
let of_mini_c_value ({type_expr} as ty, v : AST.type_expr * value) : AST.expr result = match (type_expr, v) with
| String, `String s -> ok @@ ghost (Constant (String s)) ty
| Bool, `Bool b -> ok @@ ghost (Constant (if b then True else False)) ty
| Unit, `Unit -> ok @@ ghost (Constant (Unit)) ty
| Int, `Int n -> ok @@ ghost (Constant (Int (Z.of_int n))) ty
| Function _, _ -> simple_fail "Functions aren't retrieved from Mini_C yet"
| _ -> simple_fail "of_mini_c_value error"
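To make the Append_tree folds in translate_type above concrete: sum and record types are lowered to nested binary `Or`/`Pair` nodes. The snippet below is an illustration only, with self-contained stand-ins for Mini_c's type_value; the exact nesting depends on how Append_tree balances its input list.

(* Stand-in types for the illustration; the real ones come from Mini_c. *)
type base = Int | String | Bool
type tv = [ `Base of base | `Pair of tv * tv | `Or of tv * tv ]

(* One possible lowering of record [a : int; b : string; c : bool]:
   translate_type folds the field types pairwise with `Pair. *)
let lowered_record : tv =
  `Pair (`Base Int, `Pair (`Base String, `Base Bool))

(* Likewise, a two-constructor sum folds with `Or. *)
let lowered_sum : tv =
  `Or (`Base Int, `Base Bool)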


@ -41,18 +41,21 @@ and type_expression =
| Type_variable of type_name | Type_variable of type_name
| Type_constant of type_name * te list | Type_constant of type_name * te list
and expression = and lambda = {
(* Base *)
| Literal of literal
| Constant of name * ae list (* For language constants, like (Cons hd tl) or (plus i j) *)
| Variable of name
| Lambda of {
binder: name ; binder: name ;
input_type: type_expression ; input_type: type_expression ;
output_type: type_expression ; output_type: type_expression ;
result: ae ; result: ae ;
body: block ; body: block ;
} }
and expression =
(* Base *)
| Literal of literal
| Constant of name * ae list (* For language constants, like (Cons hd tl) or (plus i j) *)
| Variable of name
| Lambda of lambda
| Application of ae * ae
(* Tuple *) (* Tuple *)
| Tuple of ae list | Tuple of ae list
| Tuple_accessor of ae * int (* Access n'th tuple's element *) | Tuple_accessor of ae * int (* Access n'th tuple's element *)
@ -63,6 +66,7 @@ and expression =
| Record_accessor of ae * string | Record_accessor of ae * string
and literal = and literal =
| Unit
| Bool of bool | Bool of bool
| Number of int | Number of int
| String of string | String of string
@ -92,3 +96,126 @@ and matching =
match_some : name * b ; match_some : name * b ;
} }
| Match_tuple of (name * b) list | Match_tuple of (name * b) list
let ae expression = {expression ; type_annotation = None}
open Ligo_helpers.Trace
module Simplify = struct
module Raw = Ligo_parser.AST
let nseq_to_list (hd, tl) = hd :: tl
let npseq_to_list (hd, tl) = hd :: (List.map snd tl)
let rec simpl_type_expression (t:Raw.type_expr) : type_expression result =
match t with
| TPar x -> simpl_type_expression x.value.inside
| TAlias v -> ok @@ Type_variable v.value
| TApp x ->
let (name, tuple) = x.value in
let%bind lst = bind_list
@@ List.map simpl_type_expression
@@ npseq_to_list tuple.value.inside in
ok @@ Type_constant (name.value, lst)
| TProd p ->
let%bind lst = bind_list
@@ List.map simpl_type_expression
@@ npseq_to_list p.value in
ok @@ Type_tuple lst
| TRecord r ->
let aux = fun (x, y) -> let%bind y = simpl_type_expression y in ok (x, y) in
let%bind lst = bind_list
@@ List.map aux
@@ List.map (fun (x:Raw.field_decl Raw.reg) -> (x.value.field_name.value, x.value.field_type))
@@ npseq_to_list r.value.fields in
let m = List.fold_left (fun m (x, y) -> SMap.add x y m) SMap.empty lst in
ok @@ Type_record m
| TSum s ->
let aux (v:Raw.variant Raw.reg) =
let%bind te = simpl_list_type_expression
@@ npseq_to_list v.value.product.value in
ok (v.value.constr.value, te)
in
let%bind lst = bind_list
@@ List.map aux
@@ npseq_to_list s.value in
let m = List.fold_left (fun m (x, y) -> SMap.add x y m) SMap.empty lst in
ok @@ Type_sum m
and simpl_list_type_expression (lst:Raw.type_expr list) : type_expression result =
match lst with
| [] -> assert false
| [hd] -> simpl_type_expression hd
| lst ->
let%bind lst = bind_list @@ List.map simpl_type_expression lst in
ok @@ Type_tuple lst
let rec simpl_expression (t:Raw.expr) : ae result =
match t with
| EVar c -> ok @@ ae @@ Variable c.value
| ECall x ->
let (name, args) = x.value in
let f = name.value in
let%bind arg = simpl_list_expression
@@ npseq_to_list args.value.inside in
ok @@ ae @@ Application (ae @@ Variable f, arg)
| EPar x -> simpl_expression x.value.inside
| EUnit _ -> ok @@ ae @@ Literal Unit
| EBytes x -> ok @@ ae @@ Literal (Bytes (fst x.value))
| ETuple tpl ->
simpl_list_expression
@@ npseq_to_list tpl.value.inside
| EConstr (ConstrApp c) ->
let (c, args) = c.value in
let%bind arg =
simpl_list_expression
@@ npseq_to_list args.value.inside in
ok @@ ae @@ Constructor (c.value, arg)
| EArith (Add c) ->
let%bind (a, b) = simpl_binop c.value in
ok @@ ae @@ Constant ("ADD", [a;b])
| EArith (Int n) ->
let n = Z.to_int @@ snd @@ n.value in
ok @@ ae @@ Literal (Number n)
| EArith _ -> simple_fail "arith: not supported yet"
| EString (String s) ->
ok @@ ae @@ Literal (String s.value)
| EString _ -> simple_fail "string: not supported yet"
| _ -> simple_fail "todo"
and simpl_binop (t:_ Raw.bin_op) : (ae * ae) result =
let%bind a = simpl_expression t.arg1 in
let%bind b = simpl_expression t.arg2 in
ok (a, b)
and simpl_list_expression (lst:Raw.expr list) : ae result =
match lst with
| [] -> ok @@ ae @@ Literal Unit
| [hd] -> simpl_expression hd
| lst ->
let%bind lst = bind_list @@ List.map simpl_expression lst in
ok @@ ae @@ Tuple lst
and simpl_lambda (t:Raw.lambda_decl) : lambda result = simple_fail "todo"
and simpl_declaration (t:Raw.declaration) : declaration result =
let open! Raw in
match t with
| TypeDecl x ->
let {name;type_expr} : Raw.type_decl = x.value in
let%bind type_expression = simpl_type_expression type_expr in
ok @@ Type_declaration {type_name=name.value;type_expression}
| ConstDecl x ->
let {name;const_type;init} = x.value in
let%bind expression = simpl_expression init in
let%bind t = simpl_type_expression const_type in
let type_annotation = Some t in
ok @@ Constant_declaration {name=name.value;annotated_expression={expression with type_annotation}}
| LambdaDecl (FunDecl x) ->
let {name;param;ret_type;local_decls;block;return} : fun_decl = x.value in
simple_fail "todo"
| _ -> simple_fail "todo"
let simpl_program (t:Raw.ast) : program result =
bind_list @@ List.map simpl_declaration @@ nseq_to_list t.decl
end
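As a worked example of what the simplifier produces, using the ae smart constructor and the EArith (Add _) and EArith (Int _) cases above, the PascaLIGO expression 1 + 2 comes out roughly as follows. This is an illustration against the types defined earlier in this file, not additional repository code.

(* Simplified tree for "1 + 2": an ADD constant applied to two literals.
   [ae e] wraps an expression with [type_annotation = None], as defined above. *)
let one_plus_two =
  ae (Constant ("ADD", [ ae (Literal (Number 1)); ae (Literal (Number 2)) ]))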


@ -38,6 +38,10 @@ let trace err = function
| Ok _ as o -> o | Ok _ as o -> o
| Errors errs -> Errors (err :: errs) | Errors errs -> Errors (err :: errs)
let to_option = function
| Ok o -> Some o
| Errors _ -> None
let trace_option error = function let trace_option error = function
| None -> fail error | None -> fail error
| Some s -> ok s | Some s -> ok s
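The transpiler and simplifier above use these Trace combinators pervasively (ok, simple_fail, let%bind, bind_list). As a rough mental model — an illustrative sketch, not the actual Ligo_helpers.Trace implementation — they compose like this:

(* Sketch of the result type and combinators assumed by the code above.
   Here [error] is simplified to a string. *)
type error = string
type 'a result = Ok of 'a | Errors of error list

let ok x = Ok x
let fail err = Errors [err]
let simple_fail msg = Errors [msg]

(* [let%bind x = m in e] elaborates to [bind m (fun x -> e)]. *)
let bind m f =
  match m with
  | Ok x -> f x
  | Errors errs -> Errors errs

(* Combine a list of results, keeping the leftmost failure. *)
let bind_list ms =
  List.fold_right
    (fun m acc -> bind m (fun x -> bind acc (fun xs -> ok (x :: xs))))
    ms (ok [])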

File diff suppressed because it is too large.


@ -1,4 +1,4 @@
(* Abstract Syntax Tree (AST) for Ligo *) (* Abstract Syntax Tree (AST) for LIGO *)
[@@@warning "-30"] [@@@warning "-30"]
@ -21,9 +21,10 @@ val nseq_to_region : ('a -> Region.t) -> 'a nseq -> Region.t
val nsepseq_to_region : ('a -> Region.t) -> ('a,'sep) nsepseq -> Region.t val nsepseq_to_region : ('a -> Region.t) -> ('a,'sep) nsepseq -> Region.t
val sepseq_to_region : ('a -> Region.t) -> ('a,'sep) sepseq -> Region.t val sepseq_to_region : ('a -> Region.t) -> ('a,'sep) sepseq -> Region.t
(* Keywords of Ligo *) (* Keywords of LIGO *)
type kwd_begin = Region.t type kwd_begin = Region.t
type kwd_case = Region.t
type kwd_const = Region.t type kwd_const = Region.t
type kwd_down = Region.t type kwd_down = Region.t
type kwd_else = Region.t type kwd_else = Region.t
@ -35,14 +36,14 @@ type kwd_function = Region.t
type kwd_if = Region.t type kwd_if = Region.t
type kwd_in = Region.t type kwd_in = Region.t
type kwd_is = Region.t type kwd_is = Region.t
type kwd_match = Region.t type kwd_map = Region.t
type kwd_mod = Region.t type kwd_mod = Region.t
type kwd_not = Region.t type kwd_not = Region.t
type kwd_null = Region.t
type kwd_of = Region.t type kwd_of = Region.t
type kwd_operations = Region.t type kwd_patch = Region.t
type kwd_procedure = Region.t type kwd_procedure = Region.t
type kwd_record = Region.t type kwd_record = Region.t
type kwd_skip = Region.t
type kwd_step = Region.t type kwd_step = Region.t
type kwd_storage = Region.t type kwd_storage = Region.t
type kwd_then = Region.t type kwd_then = Region.t
@ -62,34 +63,34 @@ type c_Unit = Region.t
(* Symbols *) (* Symbols *)
type semi = Region.t type semi = Region.t (* ";" *)
type comma = Region.t type comma = Region.t (* "," *)
type lpar = Region.t type lpar = Region.t (* "(" *)
type rpar = Region.t type rpar = Region.t (* ")" *)
type lbrace = Region.t type lbrace = Region.t (* "{" *)
type rbrace = Region.t type rbrace = Region.t (* "}" *)
type lbracket = Region.t type lbracket = Region.t (* "[" *)
type rbracket = Region.t type rbracket = Region.t (* "]" *)
type cons = Region.t type cons = Region.t (* "#" *)
type vbar = Region.t type vbar = Region.t (* "|" *)
type arrow = Region.t type arrow = Region.t (* "->" *)
type ass = Region.t type assign = Region.t (* ":=" *)
type equal = Region.t type equal = Region.t (* "=" *)
type colon = Region.t type colon = Region.t (* ":" *)
type bool_or = Region.t type bool_or = Region.t (* "||" *)
type bool_and = Region.t type bool_and = Region.t (* "&&" *)
type lt = Region.t type lt = Region.t (* "<" *)
type leq = Region.t type leq = Region.t (* "<=" *)
type gt = Region.t type gt = Region.t (* ">" *)
type geq = Region.t type geq = Region.t (* ">=" *)
type neq = Region.t type neq = Region.t (* "=/=" *)
type plus = Region.t type plus = Region.t (* "+" *)
type minus = Region.t type minus = Region.t (* "-" *)
type slash = Region.t type slash = Region.t (* "/" *)
type times = Region.t type times = Region.t (* "*" *)
type dot = Region.t type dot = Region.t (* "." *)
type wild = Region.t type wild = Region.t (* "_" *)
type cat = Region.t type cat = Region.t (* "^" *)
(* Virtual tokens *) (* Virtual tokens *)
@ -104,25 +105,29 @@ type field_name = string reg
type map_name = string reg type map_name = string reg
type constr = string reg type constr = string reg
(* Comma-separated non-empty lists *)
type 'a csv = ('a, comma) nsepseq
(* Bar-separated non-empty lists *)
type 'a bsv = ('a, vbar) nsepseq
(* Parentheses *) (* Parentheses *)
type 'a par = (lpar * 'a * rpar) reg type 'a par = {
lpar : lpar;
inside : 'a;
rpar : rpar
}
(* Brackets compounds *) (* Brackets compounds *)
type 'a brackets = (lbracket * 'a * rbracket) reg type 'a brackets = {
lbracket : lbracket;
inside : 'a;
rbracket : rbracket
}
(* Braced compounds *) (* Braced compounds *)
type 'a braces = (lbrace * 'a * rbrace) reg type 'a braces = {
lbrace : lbrace;
inside : 'a;
rbrace : rbrace
}
(* The Abstract Syntax Tree *) (* The Abstract Syntax Tree *)
@ -136,8 +141,6 @@ and ast = t
and declaration = and declaration =
TypeDecl of type_decl reg TypeDecl of type_decl reg
| ConstDecl of const_decl reg | ConstDecl of const_decl reg
| StorageDecl of storage_decl reg
| OpDecl of operations_decl reg
| LambdaDecl of lambda_decl | LambdaDecl of lambda_decl
and const_decl = { and const_decl = {
@ -150,22 +153,6 @@ and const_decl = {
terminator : semi option terminator : semi option
} }
and storage_decl = {
kwd_storage : kwd_storage;
name : variable;
colon : colon;
store_type : type_expr;
terminator : semi option
}
and operations_decl = {
kwd_operations : kwd_operations;
name : variable;
colon : colon;
op_type : type_expr;
terminator : semi option
}
(* Type declarations *) (* Type declarations *)
and type_decl = { and type_decl = {
@ -177,24 +164,36 @@ and type_decl = {
} }
and type_expr = and type_expr =
Prod of cartesian TProd of cartesian
| Sum of (variant, vbar) nsepseq reg | TSum of (variant reg, vbar) nsepseq reg
| Record of record_type | TRecord of record_type reg
| TypeApp of (type_name * type_tuple) reg | TApp of (type_name * type_tuple) reg
| ParType of type_expr par | TPar of type_expr par reg
| TAlias of variable | TAlias of variable
and cartesian = (type_expr, times) nsepseq reg and cartesian = (type_expr, times) nsepseq reg
and variant = (constr * kwd_of * cartesian) reg and variant = {
constr : constr;
kwd_of : kwd_of;
product : cartesian
}
and record_type = (kwd_record * field_decls * kwd_end) reg and record_type = {
kwd_record : kwd_record;
fields : field_decls;
kwd_end : kwd_end
}
and field_decls = (field_decl, semi) nsepseq and field_decls = (field_decl reg, semi) nsepseq
and field_decl = (variable * colon * type_expr) reg and field_decl = {
field_name : field_name;
colon : colon;
field_type : type_expr
}
and type_tuple = (type_name, comma) nsepseq par and type_tuple = (type_expr, comma) nsepseq par reg
(* Function and procedure declarations *) (* Function and procedure declarations *)
@ -230,22 +229,50 @@ and proc_decl = {
and entry_decl = { and entry_decl = {
kwd_entrypoint : kwd_entrypoint; kwd_entrypoint : kwd_entrypoint;
name : variable; name : variable;
param : parameters; param : entry_params;
colon : colon;
ret_type : type_expr;
kwd_is : kwd_is; kwd_is : kwd_is;
local_decls : local_decl list; local_decls : local_decl list;
block : block reg; block : block reg;
kwd_with : kwd_with;
return : expr;
terminator : semi option terminator : semi option
} }
and parameters = (param_decl, semi) nsepseq par and parameters = (param_decl, semi) nsepseq par reg
and entry_params = (entry_param_decl, semi) nsepseq par reg
and entry_param_decl =
EntryConst of param_const reg
| EntryVar of param_var reg
| EntryStore of storage reg
and storage = {
kwd_storage : kwd_storage;
var : variable;
colon : colon;
storage_type : type_expr
}
and param_decl = and param_decl =
ParamConst of param_const ParamConst of param_const reg
| ParamVar of param_var | ParamVar of param_var reg
and param_const = (kwd_const * variable * colon * type_expr) reg and param_const = {
kwd_const : kwd_const;
var : variable;
colon : colon;
param_type : type_expr
}
and param_var = (kwd_var * variable * colon * type_expr) reg and param_var = {
kwd_var : kwd_var;
var : variable;
colon : colon;
param_type : type_expr
}
and block = { and block = {
opening : kwd_begin; opening : kwd_begin;
@ -264,12 +291,12 @@ and var_decl = {
name : variable; name : variable;
colon : colon; colon : colon;
var_type : type_expr; var_type : type_expr;
ass : ass; assign : assign;
init : expr; init : expr;
terminator : semi option terminator : semi option
} }
and instructions = (instruction, semi) nsepseq reg and instructions = (instruction, semi) nsepseq
and instruction = and instruction =
Single of single_instr Single of single_instr
@ -277,12 +304,46 @@ and instruction =
and single_instr = and single_instr =
Cond of conditional reg Cond of conditional reg
| Match of match_instr reg | Case of case_instr reg
| Ass of ass_instr | Assign of assignment reg
| Loop of loop | Loop of loop
| ProcCall of fun_call | ProcCall of fun_call
| Null of kwd_null | Fail of fail_instr reg
| Fail of (kwd_fail * expr) reg | Skip of kwd_skip
| RecordPatch of record_patch reg
| MapPatch of map_patch reg
and map_patch = {
kwd_patch : kwd_patch;
path : path;
kwd_with : kwd_with;
map_inj : map_injection reg
}
and map_injection = {
opening : kwd_map;
bindings : (binding reg, semi) nsepseq;
terminator : semi option;
close : kwd_end
}
and binding = {
source : expr;
arrow : arrow;
image : expr
}
and record_patch = {
kwd_patch : kwd_patch;
path : path;
kwd_with : kwd_with;
record_inj : record_injection reg
}
and fail_instr = {
kwd_fail : kwd_fail;
fail_expr : expr
}
and conditional = { and conditional = {
kwd_if : kwd_if; kwd_if : kwd_if;
@ -293,26 +354,46 @@ and conditional = {
ifnot : instruction ifnot : instruction
} }
and match_instr = { and case_instr = {
kwd_match : kwd_match; kwd_case : kwd_case;
expr : expr; expr : expr;
kwd_with : kwd_with; kwd_of : kwd_of;
lead_vbar : vbar option; lead_vbar : vbar option;
cases : cases; cases : cases;
kwd_end : kwd_end kwd_end : kwd_end
} }
and cases = (case, vbar) nsepseq reg and cases = (case reg, vbar) nsepseq reg
and case = (pattern * arrow * instruction) reg and case = {
pattern : pattern;
arrow : arrow;
instr : instruction
}
and ass_instr = (variable * ass * expr) reg and assignment = {
lhs : lhs;
assign : assign;
rhs : rhs;
}
and lhs =
Path of path
| MapPath of map_lookup reg
and rhs =
Expr of expr
| NoneExpr of c_None
and loop = and loop =
While of while_loop While of while_loop reg
| For of for_loop | For of for_loop
and while_loop = (kwd_while * expr * block reg) reg and while_loop = {
kwd_while : kwd_while;
cond : expr;
block : block reg
}
and for_loop = and for_loop =
ForInt of for_int reg ForInt of for_int reg
@ -320,7 +401,7 @@ and for_loop =
and for_int = { and for_int = {
kwd_for : kwd_for; kwd_for : kwd_for;
ass : ass_instr; assign : var_assign reg;
down : kwd_down option; down : kwd_down option;
kwd_to : kwd_to; kwd_to : kwd_to;
bound : expr; bound : expr;
@ -328,6 +409,12 @@ and for_int = {
block : block reg block : block reg
} }
and var_assign = {
name : variable;
assign : assign;
expr : expr
}
and for_collect = { and for_collect = {
kwd_for : kwd_for; kwd_for : kwd_for;
var : variable; var : variable;
@ -340,98 +427,177 @@ and for_collect = {
(* Expressions *) (* Expressions *)
and expr = and expr =
Or of (expr * bool_or * expr) reg ELogic of logic_expr
| And of (expr * bool_and * expr) reg | EArith of arith_expr
| Lt of (expr * lt * expr) reg | EString of string_expr
| Leq of (expr * leq * expr) reg | EList of list_expr
| Gt of (expr * gt * expr) reg | ESet of set_expr
| Geq of (expr * geq * expr) reg | EConstr of constr_expr
| Equal of (expr * equal * expr) reg | ERecord of record_expr
| Neq of (expr * neq * expr) reg | EMap of map_expr
| Cat of (expr * cat * expr) reg | EVar of Lexer.lexeme reg
| Cons of (expr * cons * expr) reg | ECall of fun_call
| Add of (expr * plus * expr) reg | EBytes of (Lexer.lexeme * Hex.t) reg
| Sub of (expr * minus * expr) reg | EUnit of c_Unit
| Mult of (expr * times * expr) reg | ETuple of tuple
| Div of (expr * slash * expr) reg | EPar of expr par reg
| Mod of (expr * kwd_mod * expr) reg
| Neg of (minus * expr) reg and map_expr =
| Not of (kwd_not * expr) reg MapLookUp of map_lookup reg
| Int of (Lexer.lexeme * Z.t) reg | MapInj of map_injection reg
| Var of Lexer.lexeme reg
| String of Lexer.lexeme reg and map_lookup = {
| Bytes of (Lexer.lexeme * MBytes.t) reg path : path;
index : expr brackets reg
}
and path =
Name of variable
| RecordPath of record_projection reg
and logic_expr =
BoolExpr of bool_expr
| CompExpr of comp_expr
and bool_expr =
Or of bool_or bin_op reg
| And of bool_and bin_op reg
| Not of kwd_not un_op reg
| False of c_False | False of c_False
| True of c_True | True of c_True
| Unit of c_Unit
| Tuple of tuple
| List of (expr, comma) nsepseq brackets
| EmptyList of empty_list
| Set of (expr, comma) nsepseq braces
| EmptySet of empty_set
| NoneExpr of none_expr
| FunCall of fun_call
| ConstrApp of constr_app
| SomeApp of (c_Some * arguments) reg
| MapLookUp of map_lookup reg
| ParExpr of expr par
and tuple = (expr, comma) nsepseq par and 'a bin_op = {
op : 'a;
arg1 : expr;
arg2 : expr
}
and empty_list = and 'a un_op = {
(lbracket * rbracket * colon * type_expr) par op : 'a;
arg : expr
}
and empty_set = and comp_expr =
(lbrace * rbrace * colon * type_expr) par Lt of lt bin_op reg
| Leq of leq bin_op reg
| Gt of gt bin_op reg
| Geq of geq bin_op reg
| Equal of equal bin_op reg
| Neq of neq bin_op reg
and none_expr = and arith_expr =
(c_None * colon * type_expr) par Add of plus bin_op reg
| Sub of minus bin_op reg
| Mult of times bin_op reg
| Div of slash bin_op reg
| Mod of kwd_mod bin_op reg
| Neg of minus un_op reg
| Int of (Lexer.lexeme * Z.t) reg
and string_expr =
Cat of cat bin_op reg
| String of Lexer.lexeme reg
and list_expr =
Cons of cons bin_op reg
| List of (expr, comma) nsepseq brackets reg
| EmptyList of empty_list reg
and set_expr =
Set of (expr, comma) nsepseq braces reg
| EmptySet of empty_set reg
and constr_expr =
SomeApp of (c_Some * arguments) reg
| NoneExpr of none_expr reg
| ConstrApp of (constr * arguments) reg
and record_expr =
RecordInj of record_injection reg
| RecordProj of record_projection reg
and record_injection = {
opening : kwd_record;
fields : (field_assign reg, semi) nsepseq;
terminator : semi option;
close : kwd_end
}
and field_assign = {
field_name : field_name;
equal : equal;
field_expr : expr
}
and record_projection = {
record_name : variable;
selector : dot;
field_path : (field_name, dot) nsepseq
}
and tuple = (expr, comma) nsepseq par reg
and empty_list = typed_empty_list par
and typed_empty_list = {
lbracket : lbracket;
rbracket : rbracket;
colon : colon;
list_type : type_expr
}
and empty_set = typed_empty_set par
and typed_empty_set = {
lbrace : lbrace;
rbrace : rbrace;
colon : colon;
set_type : type_expr
}
and none_expr = typed_none_expr par
and typed_none_expr = {
c_None : c_None;
colon : colon;
opt_type : type_expr
}
and fun_call = (fun_name * arguments) reg and fun_call = (fun_name * arguments) reg
and arguments = tuple and arguments = tuple
and constr_app = (constr * arguments) reg
and map_lookup = {
map_name : variable;
selector : dot;
index : expr brackets
}
(* Patterns *) (* Patterns *)
and pattern = (core_pattern, cons) nsepseq reg and pattern =
PCons of (pattern, cons) nsepseq reg
and core_pattern = | PVar of Lexer.lexeme reg
PVar of Lexer.lexeme reg
| PWild of wild | PWild of wild
| PInt of (Lexer.lexeme * Z.t) reg | PInt of (Lexer.lexeme * Z.t) reg
| PBytes of (Lexer.lexeme * MBytes.t) reg | PBytes of (Lexer.lexeme * Hex.t) reg
| PString of Lexer.lexeme reg | PString of Lexer.lexeme reg
| PUnit of c_Unit | PUnit of c_Unit
| PFalse of c_False | PFalse of c_False
| PTrue of c_True | PTrue of c_True
| PNone of c_None | PNone of c_None
| PSome of (c_Some * core_pattern par) reg | PSome of (c_Some * pattern par reg) reg
| PList of list_pattern | PList of list_pattern
| PTuple of (core_pattern, comma) nsepseq par | PTuple of (pattern, comma) nsepseq par reg
and list_pattern = and list_pattern =
Sugar of (core_pattern, comma) sepseq brackets Sugar of (pattern, comma) sepseq brackets reg
| Raw of (core_pattern * cons * pattern) par | Raw of (pattern * cons * pattern) par reg
(* Projecting regions *) (* Projecting regions *)
val type_expr_to_region : type_expr -> Region.t val type_expr_to_region : type_expr -> Region.t
val expr_to_region : expr -> Region.t val expr_to_region : expr -> Region.t
val instr_to_region : instruction -> Region.t val instr_to_region : instruction -> Region.t
val pattern_to_region : pattern -> Region.t
val core_pattern_to_region : core_pattern -> Region.t
val local_decl_to_region : local_decl -> Region.t val local_decl_to_region : local_decl -> Region.t
val path_to_region : path -> Region.t
val lhs_to_region : lhs -> Region.t
val rhs_to_region : rhs -> Region.t
(* Printing *) (* Printing *)


@ -18,7 +18,7 @@ module O = struct
PVar of var_name PVar of var_name
| PWild | PWild
| PInt of Z.t | PInt of Z.t
| PBytes of MBytes.t | PBytes of Hex.t
| PString of string | PString of string
| PUnit | PUnit
| PFalse | PFalse
@ -42,6 +42,7 @@ module O = struct
| Function of { arg: type_expr; ret: type_expr } | Function of { arg: type_expr; ret: type_expr }
| Ref of type_expr | Ref of type_expr
| String | String
| Bytes
| Int | Int
| Unit | Unit
| Bool | Bool
@ -80,7 +81,7 @@ module O = struct
and constant = and constant =
Unit Unit
| Int of Z.t | String of string | Bytes of MBytes.t | Int of Z.t | String of string | Bytes of Hex.t
| False | True | False | True
| Null of type_expr | Null of type_expr
| EmptySet of type_expr | EmptySet of type_expr
@ -653,7 +654,7 @@ let s_ast (ast : I.ast) : O.ast =
(* and s_bytes {region; value = lexeme, abstract} = *) (* and s_bytes {region; value = lexeme, abstract} = *)
(* printf "%s: Bytes (\"%s\", \"0x%s\")\n" *) (* printf "%s: Bytes (\"%s\", \"0x%s\")\n" *)
(* (compact region) lexeme *) (* (compact region) lexeme *)
(* (MBytes.to_hex abstract |> Hex.to_string) *) (* (Hex.to_string abstract) *)
(* and s_int {region; value = lexeme, abstract} = *) (* and s_int {region; value = lexeme, abstract} = *)
(* printf "%s: Int (\"%s\", %s)\n" *) (* printf "%s: Int (\"%s\", %s)\n" *)


@ -1,4 +1,4 @@
(* Parsing the command-line option for testing the Ligo lexer and (* Parsing the command-line option for testing the LIGO lexer and
parser *) parser *)
let printf = Printf.printf let printf = Printf.printf
@ -11,16 +11,17 @@ let abort msg =
let help () = let help () =
let file = Filename.basename Sys.argv.(0) in let file = Filename.basename Sys.argv.(0) in
printf "Usage: %s [<option> ...] [<input>.li | \"-\"]\n" file; printf "Usage: %s [<option> ...] [<input>.ligo | \"-\"]\n" file;
print_endline "where <input>.li is the Ligo source file (default: stdin),"; print_endline "where <input>.ligo is the LIGO source file (default: stdin),";
print_endline "and each <option> (if any) is one of the following:"; print_endline "and each <option> (if any) is one of the following:";
print_endline " -I <paths> Library paths (colon-separated)";
print_endline " -c, --copy Print lexemes of tokens and markup (lexer)"; print_endline " -c, --copy Print lexemes of tokens and markup (lexer)";
print_endline " -t, --tokens Print tokens (lexer)"; print_endline " -t, --tokens Print tokens (lexer)";
print_endline " -u, --units Print tokens and markup (lexer)"; print_endline " -u, --units Print tokens and markup (lexer)";
print_endline " -q, --quiet No output, except errors (default)"; print_endline " -q, --quiet No output, except errors (default)";
print_endline " --columns Columns for source locations"; print_endline " --columns Columns for source locations";
print_endline " --bytes Bytes for source locations"; print_endline " --bytes Bytes for source locations";
print_endline " --verbose=<stages> cmdline, ast"; print_endline " --verbose=<stages> cmdline, cpp, ast (colon-separated)";
print_endline " --version Commit hash on stdout"; print_endline " --version Commit hash on stdout";
print_endline " -h, --help This help"; print_endline " -h, --help This help";
exit 0 exit 0
@ -39,9 +40,12 @@ and columns = ref false
and bytes = ref false and bytes = ref false
and verbose = ref Utils.String.Set.empty and verbose = ref Utils.String.Set.empty
and input = ref None and input = ref None
and libs = ref []
let split_at_colon = Str.(split (regexp ":")) let split_at_colon = Str.(split (regexp ":"))
let add_path p = libs := !libs @ split_at_colon p
let add_verbose d = let add_verbose d =
verbose := List.fold_left (Utils.swap Utils.String.Set.add) verbose := List.fold_left (Utils.swap Utils.String.Set.add)
!verbose !verbose
@ -49,6 +53,7 @@ let add_verbose d =
let specs = let specs =
let open! Getopt in [ let open! Getopt in [
'I', nolong, None, Some add_path;
'c', "copy", set copy true, None; 'c', "copy", set copy true, None;
't', "tokens", set tokens true, None; 't', "tokens", set tokens true, None;
'u', "units", set units true, None; 'u', "units", set units true, None;
@ -92,6 +97,10 @@ let string_of convert = function
None -> "None" None -> "None"
| Some s -> sprintf "Some %s" (convert s) | Some s -> sprintf "Some %s" (convert s)
let string_of_path p =
let apply s a = if a = "" then s else s ^ ":" ^ a
in List.fold_right apply p ""
let quote s = sprintf "\"%s\"" s let quote s = sprintf "\"%s\"" s
let verbose_str = let verbose_str =
@ -108,7 +117,8 @@ let print_opt () =
printf "columns = %b\n" !columns; printf "columns = %b\n" !columns;
printf "bytes = %b\n" !bytes; printf "bytes = %b\n" !bytes;
printf "verbose = \"%s\"\n" verbose_str; printf "verbose = \"%s\"\n" verbose_str;
printf "input = %s\n" (string_of quote !input) printf "input = %s\n" (string_of quote !input);
printf "libs = %s\n" (string_of_path !libs)
;; ;;
if Utils.String.Set.mem "cmdline" !verbose then print_opt ();; if Utils.String.Set.mem "cmdline" !verbose then print_opt ();;
@ -117,11 +127,11 @@ let input =
match !input with match !input with
None | Some "-" -> !input None | Some "-" -> !input
| Some file_path -> | Some file_path ->
if Filename.check_suffix file_path ".li" if Filename.check_suffix file_path ".ligo"
then if Sys.file_exists file_path then if Sys.file_exists file_path
then Some file_path then Some file_path
else abort "Source file not found." else abort "Source file not found."
else abort "Source file lacks the extension .li." else abort "Source file lacks the extension .ligo."
(* Exporting remaining options as non-mutable values *) (* Exporting remaining options as non-mutable values *)
@ -132,6 +142,7 @@ and quiet = !quiet
and offsets = not !columns and offsets = not !columns
and mode = if !bytes then `Byte else `Point and mode = if !bytes then `Byte else `Point
and verbose = !verbose and verbose = !verbose
and libs = !libs
;; ;;
if Utils.String.Set.mem "cmdline" verbose then if Utils.String.Set.mem "cmdline" verbose then
@ -144,6 +155,7 @@ if Utils.String.Set.mem "cmdline" verbose then
printf "offsets = %b\n" offsets; printf "offsets = %b\n" offsets;
printf "mode = %s\n" (if mode = `Byte then "`Byte" else "`Point"); printf "mode = %s\n" (if mode = `Byte then "`Byte" else "`Point");
printf "verbose = \"%s\"\n" verbose_str; printf "verbose = \"%s\"\n" verbose_str;
printf "input = %s\n" (string_of quote input) printf "input = %s\n" (string_of quote input);
printf "I = %s\n" (string_of_path libs)
end end
;; ;;


@ -1,4 +1,4 @@
(* Parsing the command-line option for testing the Ligo lexer and (* Parsing the command-line option for testing the LIGO lexer and
parser *) parser *)
(* If the value [offsets] is [true], then the user requested that (* If the value [offsets] is [true], then the user requested that
@ -25,6 +25,10 @@ val verbose : Utils.String.Set.t
val input : string option val input : string option
(* Paths where to find LIGO files for inclusion *)
val libs : string list
(* If the value [cmd] is (* If the value [cmd] is
* [Quiet], then no output from the lexer and parser should be * [Quiet], then no output from the lexer and parser should be
expected, safe error messages: this is the default value; expected, safe error messages: this is the default value;


@ -1,4 +1,4 @@
(* This signature defines the lexical tokens for Ligo (* This signature defines the lexical tokens for LIGO
_Tokens_ are the abstract units which are used by the parser to _Tokens_ are the abstract units which are used by the parser to
build the abstract syntax tree (AST), in other words, the stream of build the abstract syntax tree (AST), in other words, the stream of
@ -29,7 +29,7 @@ type t =
(* Literals *) (* Literals *)
String of lexeme Region.reg String of lexeme Region.reg
| Bytes of (lexeme * MBytes.t) Region.reg | Bytes of (lexeme * Hex.t) Region.reg
| Int of (lexeme * Z.t) Region.reg | Int of (lexeme * Z.t) Region.reg
| Ident of lexeme Region.reg | Ident of lexeme Region.reg
| Constr of lexeme Region.reg | Constr of lexeme Region.reg
@ -50,8 +50,6 @@ type t =
| ASS of Region.t (* ":=" *) | ASS of Region.t (* ":=" *)
| EQUAL of Region.t (* "=" *) | EQUAL of Region.t (* "=" *)
| COLON of Region.t (* ":" *) | COLON of Region.t (* ":" *)
| OR of Region.t (* "||" *)
| AND of Region.t (* "&&" *)
| LT of Region.t (* "<" *) | LT of Region.t (* "<" *)
| LEQ of Region.t (* "<=" *) | LEQ of Region.t (* "<=" *)
| GT of Region.t (* ">" *) | GT of Region.t (* ">" *)
@ -67,32 +65,35 @@ type t =
(* Keywords *) (* Keywords *)
| And of Region.t (* "and" *)
| Begin of Region.t (* "begin" *) | Begin of Region.t (* "begin" *)
| Case of Region.t (* "case" *)
| Const of Region.t (* "const" *) | Const of Region.t (* "const" *)
| Down of Region.t (* "down" *) | Down of Region.t (* "down" *)
| Else of Region.t (* "else" *)
| End of Region.t (* "end" *)
| Entrypoint of Region.t (* "entrypoint" *)
| Fail of Region.t (* "fail" *) | Fail of Region.t (* "fail" *)
| For of Region.t (* "for" *)
| Function of Region.t (* "function" *)
| If of Region.t (* "if" *) | If of Region.t (* "if" *)
| In of Region.t (* "in" *) | In of Region.t (* "in" *)
| Is of Region.t (* "is" *) | Is of Region.t (* "is" *)
| Entrypoint of Region.t (* "entrypoint" *) | Map of Region.t (* "map" *)
| For of Region.t (* "for" *)
| Function of Region.t (* "function" *)
| Storage of Region.t (* "storage" *)
| Type of Region.t (* "type" *)
| Of of Region.t (* "of" *)
| Operations of Region.t (* "operations" *)
| Var of Region.t (* "var" *)
| End of Region.t (* "end" *)
| Then of Region.t (* "then" *)
| Else of Region.t (* "else" *)
| Match of Region.t (* "match" *)
| Null of Region.t (* "null" *)
| Procedure of Region.t (* "procedure" *)
| Record of Region.t (* "record" *)
| Step of Region.t (* "step" *)
| To of Region.t (* "to" *)
| Mod of Region.t (* "mod" *) | Mod of Region.t (* "mod" *)
| Not of Region.t (* "not" *) | Not of Region.t (* "not" *)
| Of of Region.t (* "of" *)
| Or of Region.t (* "or" *)
| Patch of Region.t (* "patch" *)
| Procedure of Region.t (* "procedure" *)
| Record of Region.t (* "record" *)
| Skip of Region.t (* "skip" *)
| Step of Region.t (* "step" *)
| Storage of Region.t (* "storage" *)
| Then of Region.t (* "then" *)
| To of Region.t (* "to" *)
| Type of Region.t (* "type" *)
| Var of Region.t (* "var" *)
| While of Region.t (* "while" *) | While of Region.t (* "while" *)
| With of Region.t (* "with" *) | With of Region.t (* "with" *)


@ -1,4 +1,4 @@
(* Lexer specification for Ligo, to be processed by [ocamllex] *) (* Lexer specification for LIGO, to be processed by [ocamllex] *)
{ {
(* START HEADER *) (* START HEADER *)
@ -28,7 +28,7 @@ type t =
(* Literals *) (* Literals *)
String of lexeme Region.reg String of lexeme Region.reg
| Bytes of (lexeme * MBytes.t) Region.reg | Bytes of (lexeme * Hex.t) Region.reg
| Int of (lexeme * Z.t) Region.reg | Int of (lexeme * Z.t) Region.reg
| Ident of lexeme Region.reg | Ident of lexeme Region.reg
| Constr of lexeme Region.reg | Constr of lexeme Region.reg
@ -49,8 +49,6 @@ type t =
| ASS of Region.t | ASS of Region.t
| EQUAL of Region.t | EQUAL of Region.t
| COLON of Region.t | COLON of Region.t
| OR of Region.t
| AND of Region.t
| LT of Region.t | LT of Region.t
| LEQ of Region.t | LEQ of Region.t
| GT of Region.t | GT of Region.t
@ -66,34 +64,37 @@ type t =
(* Keywords *) (* Keywords *)
| Begin of Region.t | And of Region.t (* "and" *)
| Const of Region.t | Begin of Region.t (* "begin" *)
| Down of Region.t | Case of Region.t (* "case" *)
| Fail of Region.t | Const of Region.t (* "const" *)
| If of Region.t | Down of Region.t (* "down" *)
| In of Region.t | Else of Region.t (* "else" *)
| Is of Region.t | End of Region.t (* "end" *)
| Entrypoint of Region.t | Entrypoint of Region.t (* "entrypoint" *)
| For of Region.t | Fail of Region.t (* "fail" *)
| Function of Region.t | For of Region.t (* "for" *)
| Storage of Region.t | Function of Region.t (* "function" *)
| Type of Region.t | If of Region.t (* "if" *)
| Of of Region.t | In of Region.t (* "in" *)
| Operations of Region.t | Is of Region.t (* "is" *)
| Var of Region.t | Map of Region.t (* "map" *)
| End of Region.t | Mod of Region.t (* "mod" *)
| Then of Region.t | Not of Region.t (* "not" *)
| Else of Region.t | Of of Region.t (* "of" *)
| Match of Region.t | Or of Region.t (* "or" *)
| Null of Region.t | Patch of Region.t (* "patch" *)
| Procedure of Region.t | Procedure of Region.t (* "procedure" *)
| Record of Region.t | Record of Region.t (* "record" *)
| Step of Region.t | Skip of Region.t (* "skip" *)
| To of Region.t | Step of Region.t (* "step" *)
| Mod of Region.t | Storage of Region.t (* "storage" *)
| Not of Region.t | Then of Region.t (* "then" *)
| While of Region.t | To of Region.t (* "to" *)
| With of Region.t | Type of Region.t (* "type" *)
| Var of Region.t (* "var" *)
| While of Region.t (* "while" *)
| With of Region.t (* "with" *)
(* Types *) (* Types *)
(* (*
@ -141,7 +142,7 @@ let proj_token = function
| Bytes Region.{region; value = s,b} -> | Bytes Region.{region; value = s,b} ->
region, region,
sprintf "Bytes (\"%s\", \"0x%s\")" sprintf "Bytes (\"%s\", \"0x%s\")"
s (MBytes.to_hex b |> Hex.to_string) s (Hex.to_string b)
| Int Region.{region; value = s,n} -> | Int Region.{region; value = s,n} ->
region, sprintf "Int (\"%s\", %s)" s (Z.to_string n) region, sprintf "Int (\"%s\", %s)" s (Z.to_string n)
@ -168,8 +169,6 @@ let proj_token = function
| ASS region -> region, "ASS" | ASS region -> region, "ASS"
| EQUAL region -> region, "EQUAL" | EQUAL region -> region, "EQUAL"
| COLON region -> region, "COLON" | COLON region -> region, "COLON"
| OR region -> region, "OR"
| AND region -> region, "AND"
| LT region -> region, "LT" | LT region -> region, "LT"
| LEQ region -> region, "LEQ" | LEQ region -> region, "LEQ"
| GT region -> region, "GT" | GT region -> region, "GT"
@ -185,32 +184,35 @@ let proj_token = function
(* Keywords *) (* Keywords *)
| And region -> region, "And"
| Begin region -> region, "Begin" | Begin region -> region, "Begin"
| Case region -> region, "Case"
| Const region -> region, "Const" | Const region -> region, "Const"
| Down region -> region, "Down" | Down region -> region, "Down"
| Else region -> region, "Else"
| End region -> region, "End"
| Entrypoint region -> region, "Entrypoint"
| Fail region -> region, "Fail" | Fail region -> region, "Fail"
| For region -> region, "For"
| Function region -> region, "Function"
| If region -> region, "If" | If region -> region, "If"
| In region -> region, "In" | In region -> region, "In"
| Is region -> region, "Is" | Is region -> region, "Is"
| Entrypoint region -> region, "Entrypoint" | Map region -> region, "Map"
| For region -> region, "For"
| Function region -> region, "Function"
| Storage region -> region, "Storage"
| Type region -> region, "Type"
| Of region -> region, "Of"
| Operations region -> region, "Operations"
| Var region -> region, "Var"
| End region -> region, "End"
| Then region -> region, "Then"
| Else region -> region, "Else"
| Match region -> region, "Match"
| Null region -> region, "Null"
| Procedure region -> region, "Procedure"
| Record region -> region, "Record"
| Step region -> region, "Step"
| To region -> region, "To"
| Mod region -> region, "Mod" | Mod region -> region, "Mod"
| Not region -> region, "Not" | Not region -> region, "Not"
| Of region -> region, "Of"
| Or region -> region, "Or"
| Patch region -> region, "Patch"
| Procedure region -> region, "Procedure"
| Record region -> region, "Record"
| Skip region -> region, "Skip"
| Step region -> region, "Step"
| Storage region -> region, "Storage"
| Then region -> region, "Then"
| To region -> region, "To"
| Type region -> region, "Type"
| Var region -> region, "Var"
| While region -> region, "While" | While region -> region, "While"
| With region -> region, "With" | With region -> region, "With"
@ -252,8 +254,6 @@ let to_lexeme = function
| ASS _ -> ":=" | ASS _ -> ":="
| EQUAL _ -> "=" | EQUAL _ -> "="
| COLON _ -> ":" | COLON _ -> ":"
| OR _ -> "||"
| AND _ -> "&&"
| LT _ -> "<" | LT _ -> "<"
| LEQ _ -> "<=" | LEQ _ -> "<="
| GT _ -> ">" | GT _ -> ">"
@ -269,7 +269,9 @@ let to_lexeme = function
(* Keywords *) (* Keywords *)
| And _ -> "and"
| Begin _ -> "begin" | Begin _ -> "begin"
| Case _ -> "case"
| Const _ -> "const" | Const _ -> "const"
| Down _ -> "down" | Down _ -> "down"
| Fail _ -> "fail" | Fail _ -> "fail"
@ -279,19 +281,20 @@ let to_lexeme = function
| Entrypoint _ -> "entrypoint" | Entrypoint _ -> "entrypoint"
| For _ -> "for" | For _ -> "for"
| Function _ -> "function" | Function _ -> "function"
| Storage _ -> "storage"
| Type _ -> "type" | Type _ -> "type"
| Of _ -> "of" | Of _ -> "of"
| Operations _ -> "operations" | Or _ -> "or"
| Var _ -> "var" | Var _ -> "var"
| End _ -> "end" | End _ -> "end"
| Then _ -> "then" | Then _ -> "then"
| Else _ -> "else" | Else _ -> "else"
| Match _ -> "match" | Map _ -> "map"
| Null _ -> "null" | Patch _ -> "patch"
| Procedure _ -> "procedure" | Procedure _ -> "procedure"
| Record _ -> "record" | Record _ -> "record"
| Skip _ -> "skip"
| Step _ -> "step" | Step _ -> "step"
| Storage _ -> "storage"
| To _ -> "to" | To _ -> "to"
| Mod _ -> "mod" | Mod _ -> "mod"
| Not _ -> "not" | Not _ -> "not"
@ -321,7 +324,9 @@ let to_region token = proj_token token |> fst
(* LEXIS *) (* LEXIS *)
let keywords = [ let keywords = [
(fun reg -> And reg);
(fun reg -> Begin reg); (fun reg -> Begin reg);
(fun reg -> Case reg);
(fun reg -> Const reg); (fun reg -> Const reg);
(fun reg -> Down reg); (fun reg -> Down reg);
(fun reg -> Fail reg); (fun reg -> Fail reg);
@ -331,19 +336,20 @@ let keywords = [
(fun reg -> Entrypoint reg); (fun reg -> Entrypoint reg);
(fun reg -> For reg); (fun reg -> For reg);
(fun reg -> Function reg); (fun reg -> Function reg);
(fun reg -> Storage reg);
(fun reg -> Type reg); (fun reg -> Type reg);
(fun reg -> Of reg); (fun reg -> Of reg);
(fun reg -> Operations reg); (fun reg -> Or reg);
(fun reg -> Var reg); (fun reg -> Var reg);
(fun reg -> End reg); (fun reg -> End reg);
(fun reg -> Then reg); (fun reg -> Then reg);
(fun reg -> Else reg); (fun reg -> Else reg);
(fun reg -> Match reg); (fun reg -> Map reg);
(fun reg -> Null reg); (fun reg -> Patch reg);
(fun reg -> Procedure reg); (fun reg -> Procedure reg);
(fun reg -> Record reg); (fun reg -> Record reg);
(fun reg -> Skip reg);
(fun reg -> Step reg); (fun reg -> Step reg);
(fun reg -> Storage reg);
(fun reg -> To reg); (fun reg -> To reg);
(fun reg -> Mod reg); (fun reg -> Mod reg);
(fun reg -> Not reg); (fun reg -> Not reg);
@ -353,8 +359,7 @@ let keywords = [
let reserved = let reserved =
let open SSet in let open SSet in
empty |> add "and" empty |> add "as"
|> add "as"
|> add "asr" |> add "asr"
|> add "assert" |> add "assert"
|> add "class" |> add "class"
@ -384,7 +389,6 @@ let reserved =
|> add "nonrec" |> add "nonrec"
|> add "object" |> add "object"
|> add "open" |> add "open"
|> add "or"
|> add "private" |> add "private"
|> add "rec" |> add "rec"
|> add "sig" |> add "sig"
@ -466,7 +470,7 @@ let mk_string lexeme region = String Region.{region; value=lexeme}
let mk_bytes lexeme region = let mk_bytes lexeme region =
let norm = Str.(global_replace (regexp "_") "" lexeme) in let norm = Str.(global_replace (regexp "_") "" lexeme) in
let value = lexeme, MBytes.of_hex (Hex.of_string norm) let value = lexeme, Hex.of_string norm
in Bytes Region.{region; value} in Bytes Region.{region; value}
type int_err = Non_canonical_zero type int_err = Non_canonical_zero
@ -496,8 +500,6 @@ let mk_sym lexeme region =
| ":=" -> ASS region | ":=" -> ASS region
| "=" -> EQUAL region | "=" -> EQUAL region
| ":" -> COLON region | ":" -> COLON region
| "||" -> OR region
| "&&" -> AND region
| "<" -> LT region | "<" -> LT region
| "<=" -> LEQ region | "<=" -> LEQ region
| ">" -> GT region | ">" -> GT region
@ -545,7 +547,9 @@ let is_ident = function
| _ -> false | _ -> false
let is_kwd = function let is_kwd = function
And _
| Begin _ | Begin _
| Case _
| Const _ | Const _
| Down _ | Down _
| Fail _ | Fail _
@ -555,19 +559,20 @@ let is_kwd = function
| Entrypoint _ | Entrypoint _
| For _ | For _
| Function _ | Function _
| Storage _
| Type _ | Type _
| Of _ | Of _
| Operations _ | Or _
| Var _ | Var _
| End _ | End _
| Then _ | Then _
| Else _ | Else _
| Match _ | Map _
| Null _ | Patch _
| Procedure _ | Procedure _
| Record _ | Record _
| Skip _
| Step _ | Step _
| Storage _
| To _ | To _
| Mod _ | Mod _
| Not _ | Not _
@ -599,8 +604,6 @@ let is_sym = function
| ASS _ | ASS _
| EQUAL _ | EQUAL _
| COLON _ | COLON _
| OR _
| AND _
| LT _ | LT _
| LEQ _ | LEQ _
| GT _ | GT _


@ -1,16 +1,16 @@
(* Lexer specification for Ligo, to be processed by [ocamllex]. (* Lexer specification for LIGO, to be processed by [ocamllex].
The underlying design principles are: The underlying design principles are:
(1) enforce stylistic constraints at a lexical level, in order to (1) enforce stylistic constraints at a lexical level, in order to
early reject potentially misleading or poorly written early reject potentially misleading or poorly written
Ligo contracts; LIGO contracts;
(2) provide precise error messages with hint as how to fix the (2) provide precise error messages with hint as how to fix the
issue, which is achieved by consulting the lexical issue, which is achieved by consulting the lexical
right-context of lexemes; right-context of lexemes;
(3) be as independent as possible from the Ligo version, so (3) be as independent as possible from the LIGO version, so
upgrades have as little impact as possible on this upgrades have as little impact as possible on this
specification: this is achieved by using the most general specification: this is achieved by using the most general
regular expressions to match the lexing buffer and broadly regular expressions to match the lexing buffer and broadly
@ -27,7 +27,7 @@
be contextualised by the lexer in terms of input source regions, so be contextualised by the lexer in terms of input source regions, so
useful error messages can be printed, therefore they are part of useful error messages can be printed, therefore they are part of
the signature [TOKEN] that parameterise the functor generated the signature [TOKEN] that parameterise the functor generated
here. For instance, if, in a future release of Ligo, new tokens may here. For instance, if, in a future release of LIGO, new tokens may
be added, and the recognition of their lexemes may entail new be added, and the recognition of their lexemes may entail new
errors, the signature [TOKEN] will have to be augmented and the errors, the signature [TOKEN] will have to be augmented and the
lexer specification changed. However, it is more likely that lexer specification changed. However, it is more likely that


@ -1,4 +1,4 @@
(* Lexer specification for Ligo, to be processed by [ocamllex]. *) (* Lexer specification for LIGO, to be processed by [ocamllex]. *)
{ {
(* START HEADER *) (* START HEADER *)
@ -46,13 +46,30 @@ let reset_file ~file buffer =
let open Lexing in let open Lexing in
buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname = file} buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname = file}
let reset_line line_num buffer = let reset_line ~line buffer =
assert (line >= 0);
let open Lexing in let open Lexing in
buffer.lex_curr_p <- {buffer.lex_curr_p with pos_lnum = line_num} buffer.lex_curr_p <- {buffer.lex_curr_p with pos_lnum = line}
let reset ~file ?(line=1) buffer = let reset_offset ~offset buffer =
(* Default value per the [Lexing] standard module convention *) assert (offset >= 0);
reset_file ~file buffer; reset_line line buffer Printf.printf "[reset] offset=%i\n" offset;
let open Lexing in
let bol = buffer.lex_curr_p.pos_bol in
buffer.lex_curr_p <- {buffer.lex_curr_p with pos_cnum = bol (*+ offset*)}
let reset ?file ?line ?offset buffer =
let () =
match file with
Some file -> reset_file ~file buffer
| None -> () in
let () =
match line with
Some line -> reset_line ~line buffer
| None -> () in
match offset with
Some offset -> reset_offset ~offset buffer
| None -> ()
(* Rolling back one lexeme _within the current semantic action_ *) (* Rolling back one lexeme _within the current semantic action_ *)
@ -192,12 +209,12 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
(* STATE *) (* STATE *)
(* Beyond tokens, the result of lexing is a state (a so-called (* Beyond tokens, the result of lexing is a state. The type
_state monad_). The type [state] represents the logical state [state] represents the logical state of the lexing engine, that
of the lexing engine, that is, a value which is threaded during is, a value which is threaded during scanning and which denotes
scanning and which denotes useful, high-level information useful, high-level information beyond what the type
beyond what the type [Lexing.lexbuf] in the standard library [Lexing.lexbuf] in the standard library already provides for
already provides for all generic lexers. all generic lexers.
Tokens are the smallest units used by the parser to build the Tokens are the smallest units used by the parser to build the
abstract syntax tree. The state includes a queue of recognised abstract syntax tree. The state includes a queue of recognised
@ -214,13 +231,13 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
lexer. lexer.
The state also includes a field [pos] which holds the current The state also includes a field [pos] which holds the current
position in the Ligo source file. The position is not always position in the LIGO source file. The position is not always
updated after a single character has been matched: that depends updated after a single character has been matched: that depends
on the regular expression that matched the lexing buffer. on the regular expression that matched the lexing buffer.
The fields [decoder] and [supply] offer the support needed The fields [decoder] and [supply] offer the support needed
for the lexing of UTF-8 encoded characters in comments (the for the lexing of UTF-8 encoded characters in comments (the
only place where they are allowed in Ligo). The former is the only place where they are allowed in LIGO). The former is the
decoder proper and the latter is the effectful function decoder proper and the latter is the effectful function
[supply] that takes a byte, a start index and a length and feed [supply] that takes a byte, a start index and a length and feed
it to [decoder]. See the documentation of the third-party it to [decoder]. See the documentation of the third-party
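A rough picture of that state, with field names and types that are assumptions made for illustration rather than the module's actual definition:

(* Hypothetical sketch of the record threaded through the scanner, based on
   the comment above: a queue of recognised tokens, the current position in
   the LIGO source, and the UTF-8 decoder plus its [supply] function. *)
type ('token, 'pos, 'decoder) state = {
  units   : 'token Queue.t;                (* tokens recognised so far *)
  pos     : 'pos;                          (* current position in the source *)
  decoder : 'decoder;                      (* UTF-8 decoder, used in comments *)
  supply  : Bytes.t -> int -> int -> unit  (* feeds a byte range to [decoder] *)
}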
@ -427,6 +444,7 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
let utf8_bom = "\xEF\xBB\xBF" (* Byte Order Mark for UTF-8 *) let utf8_bom = "\xEF\xBB\xBF" (* Byte Order Mark for UTF-8 *)
let nl = ['\n' '\r'] | "\r\n" let nl = ['\n' '\r'] | "\r\n"
let blank = ' ' | '\t'
let digit = ['0'-'9'] let digit = ['0'-'9']
let natural = digit | digit (digit | '_')* digit let natural = digit | digit (digit | '_')* digit
let integer = '-'? natural let integer = '-'? natural
@ -444,8 +462,9 @@ let esc = "\\n" | "\\\"" | "\\\\" | "\\b"
let symbol = ';' | ',' let symbol = ';' | ','
| '(' | ')' | '{' | '}' | '[' | ']' | '(' | ')' | '{' | '}' | '[' | ']'
| '#' | '|' | "->" | ":=" | '=' | ':' | '#' | '|' | "->" | ":=" | '=' | ':'
| "||" | "&&" | '<' | "<=" | '>' | ">=" | "=/=" | '<' | "<=" | '>' | ">=" | "=/="
| '+' | '-' | '*' | '.' | '_' | '^' | '+' | '-' | '*' | '.' | '_' | '^'
let string = [^'"' '\\' '\n']* (* For strings of #include *)
(* RULES *) (* RULES *)
@ -487,6 +506,43 @@ and scan state = parse
let state = scan_line thread state lexbuf |> push_line let state = scan_line thread state lexbuf |> push_line
in scan state lexbuf } in scan state lexbuf }
(* Management of #include CPP directives
An input LIGO program may contain GNU CPP (C preprocessor)
directives, and the entry modules (named *Main.ml) run CPP on them
in traditional mode:
https://gcc.gnu.org/onlinedocs/cpp/Traditional-Mode.html
The main interest in using CPP is that it can stand for a poor
man's (flat) module system for LIGO thanks to #include
directives, and the traditional mode leaves the markup mostly
undisturbed.
Some of the #line markers resulting from processing #include directives
deal with system file headers and thus have to be ignored for our
purpose. Moreover, these #line directives may also carry some
additional flags:
https://gcc.gnu.org/onlinedocs/cpp/Preprocessor-Output.html
of which 1 and 2 indicate, respectively, the start of a new file
and the return from a file (after its inclusion has been
processed).
*)
| '#' blank* ("line" blank+)? (integer as line) blank+
'"' (string as file) '"' {
let _, _, state = sync state lexbuf in
let flags, state = scan_flags state [] lexbuf in
let () = ignore flags in
let line = int_of_string line
and file = Filename.basename file in
let pos = state.pos#set ~file ~line ~offset:0 in
let state = {state with pos} in
scan state lexbuf
}
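Outside the generated lexer, the effect of that rule can be sketched directly on a [Lexing.lexbuf]: once the line number and file name captured by the regular expression are known, the current position is simply overwritten (a simplification that ignores the offset reset and the trailing flags; the file name below is an arbitrary example).

(* Sketch: applying a recognised cpp directive such as
   # 1 "contract.ligo" 1
   to the position tracked by a lexing buffer. *)
let apply_line_directive (lexbuf : Lexing.lexbuf) ~line ~file =
  let open Lexing in
  lexbuf.lex_curr_p <-
    {lexbuf.lex_curr_p with
       pos_fname = Filename.basename file;
       pos_lnum = int_of_string line}

let () =
  let lexbuf = Lexing.from_string "" in
  apply_line_directive lexbuf ~line:"1" ~file:"contract.ligo"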
(* Some special errors (* Some special errors
Some special errors are recognised in the semantic actions of the Some special errors are recognised in the semantic actions of the
@ -517,6 +573,18 @@ and scan state = parse
| _ as c { let region, _, _ = sync state lexbuf | _ as c { let region, _, _ = sync state lexbuf
in fail region (Unexpected_character c) } in fail region (Unexpected_character c) }
(* Scanning CPP #include flags *)
and scan_flags state acc = parse
blank+ { let _, _, state = sync state lexbuf
in scan_flags state acc lexbuf }
| integer as code { let _, _, state = sync state lexbuf in
let acc = int_of_string code :: acc
in scan_flags state acc lexbuf }
| nl { List.rev acc, push_newline state lexbuf }
| eof { let _, _, state = sync state lexbuf
in List.rev acc, state (* TODO *) }
(* Finishing a string *) (* Finishing a string *)
and scan_string thread state = parse and scan_string thread state = parse

View File

@ -1,4 +1,4 @@
(* Driver for the lexer of Ligo *) (* Driver for the lexer of LIGO *)
open! EvalOpt (* Reads the command-line options: Effectful! *) open! EvalOpt (* Reads the command-line options: Effectful! *)
@ -9,9 +9,47 @@ let () = Printexc.record_backtrace true
let external_ text = let external_ text =
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;; Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
(* Path for CPP inclusions (#include) *)
let lib_path =
match EvalOpt.libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
(* Preprocessing the input source and opening the input channels *)
let prefix =
match EvalOpt.input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp.li"
let pp_input =
if Utils.String.Set.mem "cpp" EvalOpt.verbose
then prefix ^ suffix
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match EvalOpt.input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - -o %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s -o %s"
lib_path file pp_input
let () =
if Utils.String.Set.mem "cpp" EvalOpt.verbose
then Printf.eprintf "%s\n%!" cpp_cmd;
if Sys.command cpp_cmd <> 0 then
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
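The preprocessing stage reduces to assembling a cpp command line in traditional mode and running it; a self-contained sketch of that assembly (library directories and file names here are placeholders):

(* Sketch: building the cpp invocation that resolves #include directives. *)
let make_cpp_cmd ~libs ~input ~output =
  let lib_path =
    List.fold_right (fun dir acc -> Printf.sprintf " -I %s%s" dir acc) libs "" in
  match input with
    None | Some "-" ->
      Printf.sprintf "cpp -traditional-cpp%s - -o %s" lib_path output
  | Some file ->
      Printf.sprintf "cpp -traditional-cpp%s %s -o %s" lib_path file output

let () =
  print_endline
    (make_cpp_cmd ~libs:["vendor"] ~input:(Some "contract.ligo")
                  ~output:"contract.pp.li")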
(* Running the lexer on the input file *) (* Running the lexer on the input file *)
module Lexer = Lexer.Make (LexToken) module Lexer = Lexer.Make (LexToken)
let () = Lexer.trace ~offsets:EvalOpt.offsets let () = Lexer.trace ~offsets:EvalOpt.offsets
EvalOpt.mode EvalOpt.input EvalOpt.cmd EvalOpt.mode (Some pp_input) EvalOpt.cmd

View File

@ -1,4 +1,4 @@
(* This module defines the sorts of markup recognised by the Ligo (* This module defines the sorts of markup recognised by the LIGO
lexer *) lexer *)
(* A lexeme is a piece of concrete syntax belonging to a token. In (* A lexeme is a piece of concrete syntax belonging to a token. In

View File

@ -6,7 +6,7 @@
(* Literals *) (* Literals *)
%token <LexToken.lexeme Region.reg> String %token <LexToken.lexeme Region.reg> String
%token <(LexToken.lexeme * MBytes.t) Region.reg> Bytes %token <(LexToken.lexeme * Hex.t) Region.reg> Bytes
%token <(LexToken.lexeme * Z.t) Region.reg> Int %token <(LexToken.lexeme * Z.t) Region.reg> Int
%token <LexToken.lexeme Region.reg> Ident %token <LexToken.lexeme Region.reg> Ident
%token <LexToken.lexeme Region.reg> Constr %token <LexToken.lexeme Region.reg> Constr
@ -27,8 +27,6 @@
%token <Region.t> ASS (* ":=" *) %token <Region.t> ASS (* ":=" *)
%token <Region.t> EQUAL (* "=" *) %token <Region.t> EQUAL (* "=" *)
%token <Region.t> COLON (* ":" *) %token <Region.t> COLON (* ":" *)
%token <Region.t> OR (* "||" *)
%token <Region.t> AND (* "&&" *)
%token <Region.t> LT (* "<" *) %token <Region.t> LT (* "<" *)
%token <Region.t> LEQ (* "<=" *) %token <Region.t> LEQ (* "<=" *)
%token <Region.t> GT (* ">" *) %token <Region.t> GT (* ">" *)
@ -44,7 +42,9 @@
(* Keywords *) (* Keywords *)
%token <Region.t> And (* "and" *)
%token <Region.t> Begin (* "begin" *) %token <Region.t> Begin (* "begin" *)
%token <Region.t> Case (* "case" *)
%token <Region.t> Const (* "const" *) %token <Region.t> Const (* "const" *)
%token <Region.t> Down (* "down" *) %token <Region.t> Down (* "down" *)
%token <Region.t> Fail (* "fail" *) %token <Region.t> Fail (* "fail" *)
@ -54,19 +54,20 @@
%token <Region.t> Entrypoint (* "entrypoint" *) %token <Region.t> Entrypoint (* "entrypoint" *)
%token <Region.t> For (* "for" *) %token <Region.t> For (* "for" *)
%token <Region.t> Function (* "function" *) %token <Region.t> Function (* "function" *)
%token <Region.t> Storage (* "storage" *)
%token <Region.t> Type (* "type" *) %token <Region.t> Type (* "type" *)
%token <Region.t> Of (* "of" *) %token <Region.t> Of (* "of" *)
%token <Region.t> Operations (* "operations" *) %token <Region.t> Or (* "or" *)
%token <Region.t> Var (* "var" *) %token <Region.t> Var (* "var" *)
%token <Region.t> End (* "end" *) %token <Region.t> End (* "end" *)
%token <Region.t> Then (* "then" *) %token <Region.t> Then (* "then" *)
%token <Region.t> Else (* "else" *) %token <Region.t> Else (* "else" *)
%token <Region.t> Match (* "match" *) %token <Region.t> Map (* "map" *)
%token <Region.t> Null (* "null" *) %token <Region.t> Patch (* "patch" *)
%token <Region.t> Procedure (* "procedure" *) %token <Region.t> Procedure (* "procedure" *)
%token <Region.t> Record (* "record" *) %token <Region.t> Record (* "record" *)
%token <Region.t> Skip (* "skip" *)
%token <Region.t> Step (* "step" *) %token <Region.t> Step (* "step" *)
%token <Region.t> Storage (* "storage" *)
%token <Region.t> To (* "to" *) %token <Region.t> To (* "to" *)
%token <Region.t> Mod (* "mod" *) %token <Region.t> Mod (* "mod" *)
%token <Region.t> Not (* "not" *) %token <Region.t> Not (* "not" *)

View File

@ -13,32 +13,72 @@ open AST
(* Entry points *) (* Entry points *)
%start program interactive_expr %start contract interactive_expr
%type <AST.t> program %type <AST.t> contract
%type <AST.expr> interactive_expr %type <AST.expr> interactive_expr
%% %%
(* RULES *) (* RULES *)
(* The rule [series(Item)] parses a list of [Item] separated by
semi-colons and optionally terminated by a semi-colon, then the
keyword [End]. *)
series(Item):
Item after_item(Item) { $1,$2 }
after_item(Item):
SEMI item_or_end(Item) {
match $2 with
`Some (item, items, term, close) ->
($1, item)::items, term, close
| `End close ->
[], Some $1, close
}
| End {
[], None, $1
}
item_or_end(Item):
End {
`End $1
}
| series(Item) {
let item, (items, term, close) = $1
in `Some (item, items, term, close)
}
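In terms of semantic values, [series(Item)] returns the first item paired with the later items (each with its leading semicolon), the optional terminating semicolon and the region of the closing [End]; a sketch of that shape with a placeholder [region] type standing in for Region.t:

(* Sketch of the value built by series(Item). *)
type region = Region

type 'item series =
  'item * ((region * 'item) list * region option * region)

(* "a; b; c; end" roughly yields: *)
let example : string series =
  "a", ([(Region, "b"); (Region, "c")], Some Region, Region)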
(* Compound constructs *) (* Compound constructs *)
par(X): par(X):
LPAR X RPAR { LPAR X RPAR {
let region = cover $1 $3 let region = cover $1 $3
in {region; value = $1,$2,$3} and value = {
lpar = $1;
inside = $2;
rpar = $3}
in {region; value}
} }
braces(X): braces(X):
LBRACE X RBRACE { LBRACE X RBRACE {
let region = cover $1 $3 let region = cover $1 $3
in {region; value = $1,$2,$3} and value = {
lbrace = $1;
inside = $2;
rbrace = $3}
in {region; value}
} }
brackets(X): brackets(X):
LBRACKET X RBRACKET { LBRACKET X RBRACKET {
let region = cover $1 $3 let region = cover $1 $3
in {region; value = $1,$2,$3} and value = {
lbracket = $1;
inside = $2;
rbracket = $3}
in {region; value}
} }
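The compound constructs now return records whose fields keep the regions of the delimiters alongside the payload; sketched below with the same placeholder [region] type:

(* Sketch of the record shapes produced by braces, brackets and par. *)
type region = Region

type 'a braces   = {lbrace : region; inside : 'a; rbrace : region}
type 'a brackets = {lbracket : region; inside : 'a; rbracket : region}
type 'a par      = {lpar : region; inside : 'a; rpar : region}

(* A parenthesised "x" is then carried as: *)
let example : string par = {lpar = Region; inside = "x"; rpar = Region}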
(* Sequences (* Sequences
@ -85,11 +125,11 @@ sepseq(X,Sep):
%inline type_name : Ident { $1 } %inline type_name : Ident { $1 }
%inline fun_name : Ident { $1 } %inline fun_name : Ident { $1 }
%inline field_name : Ident { $1 } %inline field_name : Ident { $1 }
%inline map_name : Ident { $1 } %inline record_name : Ident { $1 }
(* Main *) (* Main *)
program: contract:
nseq(declaration) EOF { nseq(declaration) EOF {
{decl = $1; eof = $2} {decl = $1; eof = $2}
} }
@ -97,50 +137,16 @@ program:
declaration: declaration:
type_decl { TypeDecl $1 } type_decl { TypeDecl $1 }
| const_decl { ConstDecl $1 } | const_decl { ConstDecl $1 }
| storage_decl { StorageDecl $1 }
| operations_decl { OpDecl $1 }
| lambda_decl { LambdaDecl $1 } | lambda_decl { LambdaDecl $1 }
storage_decl:
Storage var COLON type_expr option(SEMI) {
let stop =
match $5 with
None -> type_expr_to_region $4
| Some region -> region in
let region = cover $1 stop in
let value = {
kwd_storage = $1;
name = $2;
colon = $3;
store_type = $4;
terminator = $5}
in {region; value}
}
operations_decl:
Operations var COLON type_expr option(SEMI) {
let stop =
match $5 with
None -> type_expr_to_region $4
| Some region -> region in
let region = cover $1 stop in
let value = {
kwd_operations = $1;
name = $2;
colon = $3;
op_type = $4;
terminator = $5}
in {region; value}
}
(* Type declarations *) (* Type declarations *)
type_decl: type_decl:
Type type_name Is type_expr option(SEMI) { Type type_name Is type_expr option(SEMI) {
let stop = let stop =
match $5 with match $5 with
None -> type_expr_to_region $4 Some region -> region
| Some region -> region in | None -> type_expr_to_region $4 in
let region = cover $1 stop in let region = cover $1 stop in
let value = { let value = {
kwd_type = $1; kwd_type = $1;
@ -151,9 +157,9 @@ type_decl:
in {region; value}} in {region; value}}
type_expr: type_expr:
cartesian { Prod $1 } cartesian { TProd $1 }
| sum_type { Sum $1 } | sum_type { TSum $1 }
| record_type { Record $1 } | record_type { TRecord $1 }
cartesian: cartesian:
nsepseq(core_type,TIMES) { nsepseq(core_type,TIMES) {
@ -167,14 +173,19 @@ core_type:
} }
| type_name type_tuple { | type_name type_tuple {
let region = cover $1.region $2.region let region = cover $1.region $2.region
in TypeApp {region; value = $1,$2} in TApp {region; value = $1,$2}
}
| Map type_tuple {
let region = cover $1 $2.region in
let value = {value="map"; region=$1}
in TApp {region; value = value, $2}
} }
| par(type_expr) { | par(type_expr) {
ParType $1 TPar $1
} }
type_tuple: type_tuple:
par(nsepseq(type_name,COMMA)) { $1 } par(nsepseq(type_expr,COMMA)) { $1 }
sum_type: sum_type:
nsepseq(variant,VBAR) { nsepseq(variant,VBAR) {
@ -185,7 +196,8 @@ sum_type:
variant: variant:
Constr Of cartesian { Constr Of cartesian {
let region = cover $1.region $3.region let region = cover $1.region $3.region
in {region; value = $1,$2,$3} and value = {constr = $1; kwd_of = $2; product = $3}
in {region; value}
} }
record_type: record_type:
@ -194,14 +206,16 @@ record_type:
End End
{ {
let region = cover $1 $3 let region = cover $1 $3
in {region; value = $1,$2,$3} and value = {kwd_record = $1; fields = $2; kwd_end = $3}
in {region; value}
} }
field_decl: field_decl:
field_name COLON type_expr { field_name COLON type_expr {
let stop = type_expr_to_region $3 in let stop = type_expr_to_region $3 in
let region = cover $1.region stop let region = cover $1.region stop
in {region; value = $1,$2,$3} and value = {field_name = $1; colon = $2; field_type = $3}
in {region; value}
} }
(* Function and procedure declarations *) (* Function and procedure declarations *)
@ -218,8 +232,8 @@ fun_decl:
With expr option(SEMI) { With expr option(SEMI) {
let stop = let stop =
match $11 with match $11 with
None -> expr_to_region $10 Some region -> region
| Some region -> region in | None -> expr_to_region $10 in
let region = cover $1 stop in let region = cover $1 stop in
let value = { let value = {
kwd_function = $1; kwd_function = $1;
@ -236,6 +250,34 @@ fun_decl:
in {region; value} in {region; value}
} }
entry_decl:
Entrypoint fun_name entry_params COLON type_expr Is
seq(local_decl)
block
With expr option(SEMI) {
let stop =
match $11 with
Some region -> region
| None -> expr_to_region $10 in
let region = cover $1 stop in
let value = {
kwd_entrypoint = $1;
name = $2;
param = $3;
colon = $4;
ret_type = $5;
kwd_is = $6;
local_decls = $7;
block = $8;
kwd_with = $9;
return = $10;
terminator = $11}
in {region; value}
}
entry_params:
par(nsepseq(entry_param_decl,SEMI)) { $1 }
proc_decl: proc_decl:
Procedure fun_name parameters Is Procedure fun_name parameters Is
seq(local_decl) seq(local_decl)
@ -243,8 +285,8 @@ proc_decl:
{ {
let stop = let stop =
match $7 with match $7 with
None -> $6.region Some region -> region
| Some region -> region in | None -> $6.region in
let region = cover $1 stop in let region = cover $1 stop in
let value = { let value = {
kwd_procedure = $1; kwd_procedure = $1;
@ -257,27 +299,6 @@ proc_decl:
in {region; value} in {region; value}
} }
entry_decl:
Entrypoint fun_name parameters Is
seq(local_decl)
block option(SEMI)
{
let stop =
match $7 with
None -> $6.region
| Some region -> region in
let region = cover $1 stop in
let value = {
kwd_entrypoint = $1;
name = $2;
param = $3;
kwd_is = $4;
local_decls = $5;
block = $6;
terminator = $7}
in {region; value}
}
parameters: parameters:
par(nsepseq(param_decl,SEMI)) { $1 } par(nsepseq(param_decl,SEMI)) { $1 }
@ -285,104 +306,199 @@ param_decl:
Var var COLON type_expr { Var var COLON type_expr {
let stop = type_expr_to_region $4 in let stop = type_expr_to_region $4 in
let region = cover $1 stop let region = cover $1 stop
in ParamVar {region; value = $1,$2,$3,$4} and value = {
kwd_var = $1;
var = $2;
colon = $3;
param_type = $4}
in ParamVar {region; value}
} }
| Const var COLON type_expr { | Const var COLON type_expr {
let stop = type_expr_to_region $4 in let stop = type_expr_to_region $4 in
let region = cover $1 stop let region = cover $1 stop
in ParamConst {region; value = $1,$2,$3,$4} and value = {
kwd_const = $1;
var = $2;
colon = $3;
param_type = $4}
in ParamConst {region; value}
}
entry_param_decl:
param_decl {
match $1 with
ParamConst const -> EntryConst const
| ParamVar var -> EntryVar var
}
| Storage var COLON type_expr {
let stop = type_expr_to_region $4 in
let region = cover $1 stop
and value = {
kwd_storage = $1;
var = $2;
colon = $3;
storage_type = $4}
in EntryStore {region; value}
} }
block: block:
Begin Begin series(instruction) {
instruction after_instr let first, (others, terminator, close) = $2 in
{ let region = cover $1 close
let instrs, terminator, close = $3 in and value = {
let region = cover $1 close in
let value = {
opening = $1; opening = $1;
instr = (let value = $2, instrs in instr = first, others;
let region = nsepseq_to_region instr_to_region value
in {value; region});
terminator; terminator;
close} close}
in {region; value} in {region; value}
} }
after_instr:
SEMI instr_or_end {
match $2 with
`Some (instr, instrs, term, close) ->
($1, instr)::instrs, term, close
| `End close ->
[], Some $1, close
}
| End {
[], None, $1
}
instr_or_end:
End {
`End $1 }
| instruction after_instr {
let instrs, term, close = $2 in
`Some ($1, instrs, term, close)
}
local_decl: local_decl:
lambda_decl { LocalLam $1 } lambda_decl { LocalLam $1 }
| const_decl { LocalConst $1 } | const_decl { LocalConst $1 }
| var_decl { LocalVar $1 } | var_decl { LocalVar $1 }
unqualified_decl(OP):
var COLON type_expr OP extended_expr option(SEMI) {
let stop = match $6 with
Some region -> region
| None -> $5.region in
let init =
match $5.value with
`Expr e -> e
| `EList (lbracket, rbracket) ->
let region = $5.region
and value = {
lbracket;
rbracket;
colon = Region.ghost;
list_type = $3} in
let value = {
lpar = Region.ghost;
inside = value;
rpar = Region.ghost} in
EList (EmptyList {region; value})
| `ENone region ->
let value = {
lpar = Region.ghost;
inside = {
c_None = region;
colon = Region.ghost;
opt_type = $3};
rpar = Region.ghost}
in EConstr (NoneExpr {region; value})
| `EMap inj ->
EMap (MapInj inj)
in $1, $2, $3, $4, init, $6, stop
}
const_decl: const_decl:
Const var COLON type_expr EQUAL expr option(SEMI) { Const unqualified_decl(EQUAL) {
let stop = let name, colon, const_type, equal,
match $7 with init, terminator, stop = $2 in
None -> expr_to_region $6
| Some region -> region in
let region = cover $1 stop in let region = cover $1 stop in
let value = { let value = {
kwd_const = $1; kwd_const = $1;
name = $2; name;
colon = $3; colon;
const_type = $4; const_type;
equal = $5; equal;
init = $6; init;
terminator = $7} terminator}
in {region; value} in {region; value}
} }
var_decl: var_decl:
Var var COLON type_expr ASS expr option(SEMI) { Var unqualified_decl(ASS) {
let stop = let name, colon, var_type, assign,
match $7 with init, terminator, stop = $2 in
None -> expr_to_region $6
| Some region -> region in
let region = cover $1 stop in let region = cover $1 stop in
let value = { let value = {
kwd_var = $1; kwd_var = $1;
name = $2; name;
colon = $3; colon;
var_type = $4; var_type;
ass = $5; assign;
init = $6; init;
terminator = $7} terminator}
in {region; value} in {region; value}
} }
extended_expr:
expr { {region = expr_to_region $1;
value = `Expr $1} }
| LBRACKET RBRACKET { {region = cover $1 $2;
value = `EList ($1,$2)} }
| C_None { {region = $1; value = `ENone $1} }
| map_injection { {region = $1.region; value = `EMap $1} }
instruction: instruction:
single_instr { Single $1 } single_instr { Single $1 }
| block { Block $1 } | block { Block $1 }
single_instr: single_instr:
conditional { Cond $1 } conditional { Cond $1 }
| match_instr { Match $1 } | case_instr { Case $1 }
| ass { Ass $1 } | assignment { Assign $1 }
| loop { Loop $1 } | loop { Loop $1 }
| proc_call { ProcCall $1 } | proc_call { ProcCall $1 }
| Null { Null $1 } | fail_instr { Fail $1 }
| Fail expr { let region = cover $1 (expr_to_region $2) | Skip { Skip $1 }
in Fail {region; value = $1,$2} } | record_patch { RecordPatch $1 }
| map_patch { MapPatch $1 }
map_patch:
Patch path With map_injection {
let region = cover $1 $4.region in
let value = {
kwd_patch = $1;
path = $2;
kwd_with = $3;
map_inj = $4}
in {region; value}
}
map_injection:
Map series(binding) {
let first, (others, terminator, close) = $2 in
let region = cover $1 close
and value = {
opening = $1;
bindings = first, others;
terminator;
close}
in {region; value}
}
binding:
expr ARROW expr {
let start = expr_to_region $1
and stop = expr_to_region $3 in
let region = cover start stop
and value = {
source = $1;
arrow = $2;
image = $3}
in {region; value}
}
record_patch:
Patch path With record_injection {
let region = cover $1 $4.region in
let value = {
kwd_patch = $1;
path = $2;
kwd_with = $3;
record_inj = $4}
in {region; value}
}
fail_instr:
Fail expr {
let region = cover $1 (expr_to_region $2)
and value = {kwd_fail = $1; fail_expr = $2}
in {region; value}}
proc_call: proc_call:
fun_call { $1 } fun_call { $1 }
@ -400,13 +516,13 @@ conditional:
in {region; value} in {region; value}
} }
match_instr: case_instr:
Match expr With option(VBAR) cases End { Case expr Of option(VBAR) cases End {
let region = cover $1 $6 in let region = cover $1 $6 in
let value = { let value = {
kwd_match = $1; kwd_case = $1;
expr = $2; expr = $2;
kwd_with = $3; kwd_of = $3;
lead_vbar = $4; lead_vbar = $4;
cases = $5; cases = $5;
kwd_end = $6} kwd_end = $6}
@ -421,16 +537,27 @@ cases:
case: case:
pattern ARROW instruction { pattern ARROW instruction {
let region = cover $1.region (instr_to_region $3) let region = cover (pattern_to_region $1) (instr_to_region $3)
in {region; value = $1,$2,$3} and value = {pattern = $1; arrow = $2; instr = $3}
in {region; value}
} }
ass: assignment:
var ASS expr { lhs ASS rhs {
let region = cover $1.region (expr_to_region $3) let stop = rhs_to_region $3 in
in {region; value = $1,$2,$3} let region = cover (lhs_to_region $1) stop
and value = {lhs = $1; assign = $2; rhs = $3}
in {region; value}
} }
rhs:
expr { Expr $1 }
| C_None { NoneExpr $1 : rhs }
lhs:
path { Path $1 }
| map_lookup { MapPath $1 }
loop: loop:
while_loop { $1 } while_loop { $1 }
| for_loop { $1 } | for_loop { $1 }
@ -438,39 +565,45 @@ loop:
while_loop: while_loop:
While expr block { While expr block {
let region = cover $1 $3.region let region = cover $1 $3.region
in While {region; value=$1,$2,$3} and value = {
kwd_while = $1;
cond = $2;
block = $3}
in While {region; value}
} }
for_loop: for_loop:
For ass Down? To expr option(step_clause) block { For var_assign Down? To expr option(step_clause) block {
let region = cover $1 $7.region in let region = cover $1 $7.region in
let value = let value = {
{
kwd_for = $1; kwd_for = $1;
ass = $2; assign = $2;
down = $3; down = $3;
kwd_to = $4; kwd_to = $4;
bound = $5; bound = $5;
step = $6; step = $6;
block = $7; block = $7}
}
in For (ForInt {region; value}) in For (ForInt {region; value})
} }
| For var option(arrow_clause) In expr block { | For var option(arrow_clause) In expr block {
let region = cover $1 $6.region in let region = cover $1 $6.region in
let value = let value = {
{
kwd_for = $1; kwd_for = $1;
var = $2; var = $2;
bind_to = $3; bind_to = $3;
kwd_in = $4; kwd_in = $4;
expr = $5; expr = $5;
block = $6; block = $6}
}
in For (ForCollect {region; value}) in For (ForCollect {region; value})
} }
var_assign:
var ASS expr {
let region = cover $1.region (expr_to_region $3)
and value = {name = $1; assign = $2; expr = $3}
in {region; value}
}
step_clause: step_clause:
Step expr { $1,$2 } Step expr { $1,$2 }
@ -483,20 +616,22 @@ interactive_expr:
expr EOF { $1 } expr EOF { $1 }
expr: expr:
expr OR conj_expr { expr Or conj_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Or {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3} in
ELogic (BoolExpr (Or {region; value}))
} }
| conj_expr { $1 } | conj_expr { $1 }
conj_expr: conj_expr:
conj_expr AND comp_expr { conj_expr And comp_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
And {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in ELogic (BoolExpr (And {region; value}))
} }
| comp_expr { $1 } | comp_expr { $1 }
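The boolean connectives (and, below, the comparison operators) no longer build bare triples but named binary-operation records wrapped in layered constructors; a sketch of that layering, reduced to a few cases with a placeholder [region] type (the real AST has many more variants):

(* Sketch of the layered expression constructors built by these rules. *)
type region = Region
type 'a reg = {region : region; value : 'a}

type expr =
  EVar of string
| ELogic of logic_expr

and logic_expr =
  BoolExpr of bool_expr
| CompExpr of comp_expr

and bool_expr =
  Or  of bin_op reg
| And of bin_op reg

and comp_expr =
  Lt  of bin_op reg
| Leq of bin_op reg

and bin_op = {arg1 : expr; op : region; arg2 : expr}

(* "a or b" is then built as: *)
let example : expr =
  ELogic (BoolExpr (Or {region = Region;
                        value = {arg1 = EVar "a"; op = Region; arg2 = EVar "b"}}))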
@ -504,38 +639,44 @@ comp_expr:
comp_expr LT cat_expr { comp_expr LT cat_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Lt {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in ELogic (CompExpr (Lt {region; value}))
} }
| comp_expr LEQ cat_expr { | comp_expr LEQ cat_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Leq {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in ELogic (CompExpr (Leq {region; value}))
} }
| comp_expr GT cat_expr { | comp_expr GT cat_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Gt {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in ELogic (CompExpr (Gt {region; value}))
} }
| comp_expr GEQ cat_expr { | comp_expr GEQ cat_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Geq {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in ELogic (CompExpr (Geq {region; value}))
} }
| comp_expr EQUAL cat_expr { | comp_expr EQUAL cat_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Equal {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in ELogic (CompExpr (Equal {region; value}))
} }
| comp_expr NEQ cat_expr { | comp_expr NEQ cat_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Neq {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in ELogic (CompExpr (Neq {region; value}))
} }
| cat_expr { $1 } | cat_expr { $1 }
@ -543,8 +684,9 @@ cat_expr:
cons_expr CAT cat_expr { cons_expr CAT cat_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Cat {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in EString (Cat {region; value})
} }
| cons_expr { $1 } | cons_expr { $1 }
@ -552,8 +694,9 @@ cons_expr:
add_expr CONS cons_expr { add_expr CONS cons_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Cons {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in EList (Cons {region; value})
} }
| add_expr { $1 } | add_expr { $1 }
@ -561,14 +704,16 @@ add_expr:
add_expr PLUS mult_expr { add_expr PLUS mult_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Add {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in EArith (Add {region; value})
} }
| add_expr MINUS mult_expr { | add_expr MINUS mult_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Sub {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in EArith (Sub {region; value})
} }
| mult_expr { $1 } | mult_expr { $1 }
@ -576,68 +721,118 @@ mult_expr:
mult_expr TIMES unary_expr { mult_expr TIMES unary_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Mult {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in EArith (Mult {region; value})
} }
| mult_expr SLASH unary_expr { | mult_expr SLASH unary_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Div {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in EArith (Div {region; value})
} }
| mult_expr Mod unary_expr { | mult_expr Mod unary_expr {
let start = expr_to_region $1 let start = expr_to_region $1
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop
Mod {region; value = $1,$2,$3} and value = {arg1 = $1; op = $2; arg2 = $3}
in EArith (Mod {region; value})
} }
| unary_expr { $1 } | unary_expr { $1 }
unary_expr: unary_expr:
MINUS core_expr { MINUS core_expr {
let stop = expr_to_region $2 in let stop = expr_to_region $2 in
let region = cover $1 stop in let region = cover $1 stop
Neg {region; value = $1,$2} and value = {op = $1; arg = $2}
in EArith (Neg {region; value})
} }
| Not core_expr { | Not core_expr {
let stop = expr_to_region $2 in let stop = expr_to_region $2 in
let region = cover $1 stop in let region = cover $1 stop
Not {region; value = $1,$2} and value = {op = $1; arg = $2} in
ELogic (BoolExpr (Not {region; value}))
} }
| core_expr { $1 } | core_expr { $1 }
core_expr: core_expr:
Int { Int $1 } Int { EArith (Int $1) }
| var { Var $1 } | var { EVar $1 }
| String { String $1 } | String { EString (String $1) }
| Bytes { Bytes $1 } | Bytes { EBytes $1 }
| C_False { False $1 } | C_False { ELogic (BoolExpr (False $1)) }
| C_True { True $1 } | C_True { ELogic (BoolExpr (True $1)) }
| C_Unit { Unit $1 } | C_Unit { EUnit $1 }
| tuple { Tuple $1 } | tuple { ETuple $1 }
| list_expr { List $1 } | list_expr { EList (List $1) }
| empty_list { EmptyList $1 } | empty_list { EList (EmptyList $1) }
| set_expr { Set $1 } | set_expr { ESet (Set $1) }
| empty_set { EmptySet $1 } | empty_set { ESet (EmptySet $1) }
| none_expr { NoneExpr $1 } | none_expr { EConstr (NoneExpr $1) }
| fun_call { FunCall $1 } | fun_call { ECall $1 }
| map_expr { EMap $1 }
| record_expr { ERecord $1 }
| Constr arguments { | Constr arguments {
let region = cover $1.region $2.region in let region = cover $1.region $2.region in
ConstrApp {region; value = $1,$2} EConstr (ConstrApp {region; value = $1,$2})
} }
| C_Some arguments { | C_Some arguments {
let region = cover $1 $2.region in let region = cover $1 $2.region in
SomeApp {region; value = $1,$2} EConstr (SomeApp {region; value = $1,$2})
} }
| map_name DOT brackets(expr) {
let region = cover $1.region $3.region in map_expr:
let value = map_lookup { MapLookUp $1 }
{
map_name = $1; path:
var { Name $1 }
| record_projection { RecordPath $1 }
map_lookup:
path brackets(expr) {
let region = cover (path_to_region $1) $2.region in
let value = {
path = $1;
index = $2}
in {region; value}
}
record_expr:
record_injection { RecordInj $1 }
| record_projection { RecordProj $1 }
record_injection:
Record series(field_assignment) {
let first, (others, terminator, close) = $2 in
let region = cover $1 close
and value = {
opening = $1;
fields = first, others;
terminator;
close}
in {region; value}
}
field_assignment:
field_name EQUAL expr {
let region = cover $1.region (expr_to_region $3)
and value = {
field_name = $1;
equal = $2;
field_expr = $3}
in {region; value}
}
record_projection:
record_name DOT nsepseq(field_name,DOT) {
let stop = nsepseq_to_region (fun x -> x.region) $3 in
let region = cover $1.region stop
and value = {
record_name = $1;
selector = $2; selector = $2;
index = $3; field_path = $3}
} in {region; value}
in MapLookUp {region; value}
} }
fun_call: fun_call:
@ -656,23 +851,46 @@ list_expr:
brackets(nsepseq(expr,COMMA)) { $1 } brackets(nsepseq(expr,COMMA)) { $1 }
empty_list: empty_list:
par(LBRACKET RBRACKET COLON type_expr { $1,$2,$3,$4 }) { $1 } par(typed_empty_list) { $1 }
typed_empty_list:
LBRACKET RBRACKET COLON type_expr {
{lbracket = $1;
rbracket = $2;
colon = $3;
list_type = $4}
}
set_expr: set_expr:
braces(nsepseq(expr,COMMA)) { $1 } braces(nsepseq(expr,COMMA)) { $1 }
empty_set: empty_set:
par(LBRACE RBRACE COLON type_expr { $1,$2,$3,$4 }) { $1 } par(typed_empty_set) { $1 }
typed_empty_set:
LBRACE RBRACE COLON type_expr {
{lbrace = $1;
rbrace = $2;
colon = $3;
set_type = $4}
}
none_expr: none_expr:
par(C_None COLON type_expr { $1,$2,$3 }) { $1 } par(typed_none_expr) { $1 }
typed_none_expr:
C_None COLON type_expr {
{c_None = $1;
colon = $2;
opt_type = $3}
}
(* Patterns *) (* Patterns *)
pattern: pattern:
nsepseq(core_pattern,CONS) { nsepseq(core_pattern,CONS) {
let region = nsepseq_to_region core_pattern_to_region $1 let region = nsepseq_to_region pattern_to_region $1
in {region; value=$1} in PCons {region; value=$1}
} }
core_pattern: core_pattern:

View File

@ -1,4 +1,4 @@
(* Driver for the parser of Ligo *) (* Driver for the parser of LIGO *)
open! EvalOpt (* Reads the command-line options: Effectful! *) open! EvalOpt (* Reads the command-line options: Effectful! *)
@ -25,14 +25,13 @@ let print_error ?(offsets=true) mode Region.{region; value} =
let reg = region#to_string ~file ~offsets mode in let reg = region#to_string ~file ~offsets mode in
Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg) Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
(* Path to the Ligo standard library *) (* Path for CPP inclusions (#include) *)
(*
let lib_path = let lib_path =
match EvalOpt.libs with match EvalOpt.libs with
[] -> "" [] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path | libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in List.fold_right mk_I libs ""
*)
(* Preprocessing the input source and opening the input channels *) (* Preprocessing the input source and opening the input channels *)
@ -52,9 +51,11 @@ let pp_input =
let cpp_cmd = let cpp_cmd =
match EvalOpt.input with match EvalOpt.input with
None | Some "-" -> None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp - -o %s" pp_input Printf.sprintf "cpp -traditional-cpp%s - -o %s"
lib_path pp_input
| Some file -> | Some file ->
Printf.sprintf "cpp -traditional-cpp %s -o %s" file pp_input Printf.sprintf "cpp -traditional-cpp%s %s -o %s"
lib_path file pp_input
let () = let () =
if Utils.String.Set.mem "cpp" EvalOpt.verbose if Utils.String.Set.mem "cpp" EvalOpt.verbose
@ -84,7 +85,7 @@ let tokeniser = read ~log
let () = let () =
try try
let ast = Parser.program tokeniser buffer in let ast = Parser.contract tokeniser buffer in
if Utils.String.Set.mem "ast" EvalOpt.verbose if Utils.String.Set.mem "ast" EvalOpt.verbose
then AST.print_tokens ast then AST.print_tokens ast
with with
@ -98,12 +99,14 @@ let () =
print_error ~offsets EvalOpt.mode error print_error ~offsets EvalOpt.mode error
| Sys_error msg -> Utils.highlight msg | Sys_error msg -> Utils.highlight msg
(*
(* Temporary: force dune to build AST2.ml *) (* Temporary: force dune to build AST2.ml *)
let () = let () =
let open AST2 in let open AST2 in
let _ = s_ast in let _ = s_ast in
() ()
(*
(* Temporary: force dune to build AST2.ml *) (* Temporary: force dune to build AST2.ml *)
let () = let () =
if false then if false then
@ -111,3 +114,5 @@ let () =
() ()
else else
() ()
*)
*)

View File

@ -7,6 +7,8 @@ type t = <
set_file : string -> t; set_file : string -> t;
set_line : int -> t; set_line : int -> t;
set_offset : int -> t;
set : file:string -> line:int -> offset:int -> t;
new_line : string -> t; new_line : string -> t;
add_nl : t; add_nl : t;
@ -44,8 +46,20 @@ let make ~byte ~point_num ~point_bol =
val point_bol = point_bol val point_bol = point_bol
method point_bol = point_bol method point_bol = point_bol
method set_file file = {< byte = Lexing.{byte with pos_fname = file} >} method set_file file =
method set_line line = {< byte = Lexing.{byte with pos_lnum = line} >} {< byte = Lexing.{byte with pos_fname = file} >}
method set_line line =
{< byte = Lexing.{byte with pos_lnum = line} >}
method set_offset offset =
{< byte = Lexing.{byte with pos_cnum = byte.pos_bol + offset} >}
method set ~file ~line ~offset =
let pos = self#set_file file in
let pos = pos#set_line line in
let pos = pos#set_offset offset
in pos
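The new [set_offset] and [set] methods compose the existing functional updates; below is a stripped-down, self-contained stand-in for the position object, keeping only the byte-oriented [Lexing.position] (the file name in the usage line is an example):

(* Sketch: a simplified position object with the new setters. *)
let make_pos (byte : Lexing.position) = object (self)
  val byte = byte
  method byte = byte
  method set_file file =
    {< byte = Lexing.{byte with pos_fname = file} >}
  method set_line line =
    {< byte = Lexing.{byte with pos_lnum = line} >}
  method set_offset offset =
    {< byte = Lexing.{byte with pos_cnum = byte.pos_bol + offset} >}
  method set ~file ~line ~offset =
    ((self#set_file file)#set_line line)#set_offset offset
end

let () =
  let pos = make_pos Lexing.dummy_pos in
  let pos = pos#set ~file:"contract.ligo" ~line:1 ~offset:0 in
  let byte = pos#byte in
  Printf.printf "%s:%d\n" byte.Lexing.pos_fname byte.Lexing.pos_lnum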
(* The string must not contain '\n'. See [new_line]. *) (* The string must not contain '\n'. See [new_line]. *)

View File

@ -36,6 +36,8 @@ type t = <
set_file : string -> t; set_file : string -> t;
set_line : int -> t; set_line : int -> t;
set_offset : int -> t;
set : file:string -> line:int -> offset:int -> t;
(* The call [pos#new_line s], where the string [s] is either "\n" or (* The call [pos#new_line s], where the string [s] is either "\n" or
"\c\r", updates the position [pos] with a new line. *) "\c\r", updates the position [pos] with a new line. *)

View File

@ -10,6 +10,7 @@ type t = <
shift_bytes : int -> t; shift_bytes : int -> t;
shift_one_uchar : int -> t; shift_one_uchar : int -> t;
set_file : string -> t;
(* Getters *) (* Getters *)
@ -55,6 +56,11 @@ let make ~(start: Pos.t) ~(stop: Pos.t) =
and stop = stop#shift_one_uchar len and stop = stop#shift_one_uchar len
in {< start = start; stop = stop >} in {< start = start; stop = stop >}
method set_file name =
let start = start#set_file name
and stop = stop#set_file name
in {< start = start; stop = stop >}
(* Getters *) (* Getters *)
method file = start#file method file = start#file

View File

@ -24,10 +24,12 @@ type t = <
translation of region [region] of [n] bytes forward in the translation of region [region] of [n] bytes forward in the
file. The call [region#shift_one_uchar n] is similar, except that file. The call [region#shift_one_uchar n] is similar, except that
it assumes that [n] is the number of bytes making up one unicode it assumes that [n] is the number of bytes making up one unicode
point. *) point. The call [region#set_file f] sets the file name to be
[f]. *)
shift_bytes : int -> t; shift_bytes : int -> t;
shift_one_uchar : int -> t; shift_one_uchar : int -> t;
set_file : string -> t;
(* Getters *) (* Getters *)

View File

@ -18,7 +18,7 @@ module O = struct
PVar of var_name PVar of var_name
| PWild | PWild
| PInt of Z.t | PInt of Z.t
| PBytes of MBytes.t | PBytes of Hex.t
| PString of string | PString of string
| PUnit | PUnit
| PFalse | PFalse
@ -42,15 +42,16 @@ module O = struct
| Function of { arg: type_expr; ret: type_expr } | Function of { arg: type_expr; ret: type_expr }
| Ref of type_expr | Ref of type_expr
| String | String
| Bytes
| Int | Int
| Unit | Unit
| Bool | Bool
and type_expr = { type_expr: type_expr_case; name: string option; orig: Region.t } and type_expr = { type_expr: type_expr_case; name: type_name option; orig: Region.t }
type typed_var = { name:var_name; ty:type_expr; orig: asttodo } type typed_var = { name:var_name; ty:type_expr; orig: asttodo }
type type_decl = { name:string; ty:type_expr; orig: asttodo } type type_decl = { name: type_name; ty:type_expr; orig: asttodo }
type expr_case = type expr_case =
App of { operator: operator; arguments: expr list } App of { operator: operator; arguments: expr list }
@ -84,7 +85,7 @@ module O = struct
and constant = and constant =
Unit Unit
| Int of Z.t | String of string | Bytes of MBytes.t | Int of Z.t | String of string | Bytes of Hex.t
| False | True | False | True
| Null | Null
| EmptySet | EmptySet
@ -117,29 +118,51 @@ let fold_map f a l =
let last_acc, last_l = List.fold_left f (a, []) l let last_acc, last_l = List.fold_left f (a, []) l
in last_acc, List.rev last_l in last_acc, List.rev last_l
let map f l = List.rev (List.rev_map f l)
let shadow (name : string) (typ : O.type_expr) (env : O.type_expr list SMap.t)
: O.type_expr list SMap.t =
SMap.update name (function None -> Some [typ] | Some tl -> Some (typ :: tl)) env
let lookup (name : string) (env : O.type_expr list SMap.t) : O.type_expr =
match SMap.find name env with
latest :: shadowed -> latest
| [] -> failwith "Unbound variable"
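Both environments map a name to the stack of its bindings, so shadowing pushes a binding and lookup reads the head; a self-contained sketch over [Map.Make (String)], with [string] standing in for [O.type_expr]:

(* Sketch: environments as maps from names to stacks of bindings. *)
module StrMap = Map.Make (String)

let shadow name typ env =
  StrMap.update name
    (function None -> Some [typ] | Some tl -> Some (typ :: tl)) env

let lookup name env =
  match StrMap.find_opt name env with
    Some (latest :: _) -> latest
  | _ -> failwith ("Unbound variable " ^ name)

let () =
  let env = shadow "x" "int" StrMap.empty in
  let env = shadow "x" "string" env in
  assert (lookup "x" env = "string")  (* the latest binding shadows *)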
let string_of_name ({name;_} : I.name_and_region) = name
let a_name_and_region ({name; orig} : I.name_and_region) : O.name_and_region =
{name; orig}
let a_type_constructor (tve : tve) : I.type_constructor -> O.type_constructor = function let a_type_constructor (tve : tve) : I.type_constructor -> O.type_constructor = function
Option -> failwith "TODO" Option -> Option
| List -> failwith "TODO" | List -> List
| Set -> failwith "TODO" | Set -> Set
| Map -> failwith "TODO" | Map -> Map
let a_type_expr_case (tve : tve) : I.type_expr_case -> O.type_expr_case = function let a_type_expr_case (tve : tve) : I.type_expr_case -> O.type_expr_case = function
Sum l -> failwith "TODO" Sum lt -> failwith "TODO"
| Record l -> failwith "TODO" | Record lt -> failwith "TODO"
| TypeApp (tc, args) -> failwith "TODO" | TypeApp (tc, args) -> failwith "TODO"
| Function {arg;ret} -> failwith "TODO" | Function {arg;ret} -> failwith "TODO"
| Ref t -> failwith "TODO" | Ref t -> failwith "TODO"
| String -> failwith "TODO" | String -> String
| Int -> failwith "TODO" | Int -> Int
| Unit -> failwith "TODO" | Unit -> Unit
| Bool -> failwith "TODO" | Bool -> Bool
let a_type_expr (tve : tve) ({type_expr;name;orig} : I.type_expr) : O.type_expr = let a_type_expr (tve : tve) ({type_expr;name;orig} : I.type_expr) : O.type_expr =
failwith "TODO" let type_expr = a_type_expr_case tve type_expr in
let name = match name with
None -> None
|Some name -> Some (a_name_and_region name)
in {type_expr;name;orig}
let a_type (tve : tve) ({name;ty;orig} : I.type_decl) : tve * O.type_decl = let a_type (te,ve : tve) ({name;ty;orig} : I.type_decl) : tve * O.type_decl =
failwith "TODO" let ty = a_type_expr (te,ve) ty in
let tve = shadow (string_of_name name) ty te, ve in
let name = (a_name_and_region name) in
tve, {name; ty; orig}
let a_types (tve : tve) (l : I.type_decl list) : tve * O.type_decl list = let a_types (tve : tve) (l : I.type_decl list) : tve * O.type_decl list =
fold_map a_type tve l fold_map a_type tve l
@ -147,9 +170,99 @@ let a_types (tve : tve) (l : I.type_decl list) : tve * O.type_decl list =
let a_storage_decl : tve -> I.typed_var -> tve * O.typed_var = let a_storage_decl : tve -> I.typed_var -> tve * O.typed_var =
failwith "TODO" failwith "TODO"
let a_declarations : tve -> I.decl list -> tve * O.decl list = let type_expr_case_equal (t1 : O.type_expr_case) (t2 : O.type_expr_case) : bool = match t1,t2 with
Sum m1, Sum m2 -> failwith "TODO" (* of (O.name_and_region * O.type_expr) SMap.t *)
| Record m1, Record m2 -> failwith "TODO" (* of (O.name_and_region * O.type_expr) SMap.t *)
| TypeApp (tc1, args1), TypeApp (tc2, args2) -> failwith "TODO" (* of O.type_constructor * O.type_expr list *)
| Function {arg=arg1;ret=ret1}, Function {arg=arg2;ret=ret2} -> failwith "TODO" (* of { arg : O.type_expr; ret : O.type_expr; } *)
| Ref t1, Ref t2 -> failwith "TODO" (* of O.type_expr *)
| String, String -> true
| Int, Int -> true
| Unit, Unit -> true
| Bool, Bool -> true
| _ -> false
let type_expr_equal (t1 : O.type_expr) (t2 : O.type_expr) : bool =
type_expr_case_equal t1.type_expr t2.type_expr
let check_type_expr_equal (expected : O.type_expr) (actual : O.type_expr) : unit =
if type_expr_equal expected actual then
()
else
failwith "got [actual] but expected [expected]"
let a_var_expr (te,ve : tve) (expected : O.type_expr) (var_name : I.name_and_region) : O.expr_case =
check_type_expr_equal expected (lookup (string_of_name var_name) ve);
Var { name = a_name_and_region var_name;
ty = expected;
orig = `TODO }
let a_constant_expr (tve : tve) (expected : O.type_expr) (constant : I.constant) : O.expr_case =
let to_type_expr type_expr_case : O.type_expr =
{ type_expr = type_expr_case; name = None; orig = Region.ghost } in
let actual : O.type_expr = match constant with
Unit -> to_type_expr Unit
| Int _ -> to_type_expr Int
| String _ -> to_type_expr String
| Bytes _ -> to_type_expr Bytes
| False -> to_type_expr Bool
| True -> to_type_expr Bool
| Null t -> a_type_expr tve t
| EmptySet t -> a_type_expr tve t
| CNone t -> a_type_expr tve t
in
check_type_expr_equal expected actual;
let c : O.constant = match constant with
Unit -> Unit
| Int i -> Int i
| String s -> String s
| Bytes b -> Bytes b
| False -> False
| True -> True
| Null _ -> Null
| EmptySet _ -> EmptySet
| CNone _ -> CNone
in Constant c
let map_to_list m =
List.rev (SMap.fold (fun field_name_string p l -> p :: l) m [])
let a_field tve (expected,expr) =
failwith "TODO" failwith "TODO"
let a_record (tve : tve) (expected : O.type_expr) (record : (I.field_name * I.expr) list)
: O.expr_case =
let {type_expr = expected; _} : O.type_expr = expected in
let expected = match expected with
Record fields -> fields
| _ -> failwith "expected some_type but got record" in
let expected_and_field =
List.combine
(map_to_list expected)
record (* TODO SHOULD BE (map_to_list record) *) in
Record (map (a_field tve) expected_and_field)
let a_expr_case (te,ve : tve) (expected : O.type_expr) : I.expr -> O.expr_case = function
App {operator;arguments} -> failwith "TODO"
| Var var_name -> a_var_expr (te,ve) expected var_name
| Constant constant -> a_constant_expr (te,ve) expected constant
| Record record -> a_record (te,ve) expected record
| Lambda lambda -> failwith "TODO"
let a_expr (te,ve : tve) (expected : O.type_expr) (e : I.expr) : O.expr =
let expr_case = a_expr_case (te,ve) expected e in
{ expr = expr_case; ty = expected; orig = `TODO }
let a_declaration (te,ve : tve) ({name;ty;value} : I.decl) : tve * O.decl =
let ty = a_type_expr (te,ve) ty in
let value = a_expr (te,ve) ty value in
let ve = shadow (string_of_name name) ty ve in
let name = a_name_and_region name in
(te,ve), {var={name;ty;orig=`TODO};value;orig = `TODO}
let a_declarations (tve : tve) (l : I.decl list) : tve * O.decl list =
fold_map a_declaration tve l
let a_ast I.{types; storage_decl; declarations; orig} = let a_ast I.{types; storage_decl; declarations; orig} =
let tve = SMap.empty, SMap.empty in let tve = SMap.empty, SMap.empty in
let tve, types = a_types tve types in let tve, types = a_types tve types in
@ -159,4 +272,3 @@ let a_ast I.{types; storage_decl; declarations; orig} =
O.{types; storage_decl; declarations; orig} O.{types; storage_decl; declarations; orig}
let annotate : I.ast -> O.ast = a_ast let annotate : I.ast -> O.ast = a_ast

View File

@ -16,7 +16,7 @@ module O : sig
PVar of var_name PVar of var_name
| PWild | PWild
| PInt of Z.t | PInt of Z.t
| PBytes of MBytes.t | PBytes of Hex.t
| PString of string | PString of string
| PUnit | PUnit
| PFalse | PFalse
@ -40,15 +40,16 @@ module O : sig
| Function of { arg: type_expr; ret: type_expr } | Function of { arg: type_expr; ret: type_expr }
| Ref of type_expr | Ref of type_expr
| String | String
| Bytes
| Int | Int
| Unit | Unit
| Bool | Bool
and type_expr = { type_expr: type_expr_case; name: string option; orig: Region.t } and type_expr = { type_expr: type_expr_case; name: type_name option; orig: Region.t }
type typed_var = { name:var_name; ty:type_expr; orig: asttodo } type typed_var = { name:var_name; ty:type_expr; orig: asttodo }
type type_decl = { name:string; ty:type_expr; orig: asttodo } type type_decl = { name:type_name; ty:type_expr; orig: asttodo }
type expr_case = type expr_case =
App of { operator: operator; arguments: expr list } App of { operator: operator; arguments: expr list }
@ -82,7 +83,7 @@ module O : sig
and constant = and constant =
Unit Unit
| Int of Z.t | String of string | Bytes of MBytes.t | Int of Z.t | String of string | Bytes of Hex.t
| False | True | False | True
| Null | Null
| EmptySet | EmptySet

View File

@ -6,31 +6,65 @@
(modules ParToken Parser) (modules ParToken Parser)
(flags -la 1 --explain --external-tokens LexToken)) (flags -la 1 --explain --external-tokens LexToken))
(library (executables
(name ligo_parser) (names LexerMain ParserMain)
(public_name ligo-parser) (public_names ligo-lexer ligo-parser)
(package ligo-parser)
(modules_without_implementation Error) (modules_without_implementation Error)
(libraries getopt hex str uutf zarith) (libraries getopt hex str uutf zarith))
)
;; (executables
;; (names LexerMain ParserMain)
;; (public_names ligo-lexer ligo-parser)
;; (package ligo-parser)
;; (modules_without_implementation Error)
;; (libraries getopt hex str uutf zarith))
;; Les deux directives (rule) qui suivent sont pour le dev local. ;; Les deux directives (rule) qui suivent sont pour le dev local.
;; Il suffit de faire "dune build Parser.exe" pour avoir un Parser.exe dans le dossier. ;; Il suffit de faire "dune build Parser.exe" pour avoir un Parser.exe dans le dossier.
;; Pour le purger, il faut faire "dune clean". ;; Pour le purger, il faut faire "dune clean".
;; (rule (rule
;; (targets Parser.exe) (targets Parser.exe)
;; (deps ParserMain.exe) (deps ParserMain.exe)
;; (action (copy ParserMain.exe Parser.exe)) (action (copy ParserMain.exe Parser.exe))
;; (mode promote-until-clean)) (mode promote-until-clean))
;; (rule (rule
;; (targets Lexer.exe) (targets Lexer.exe)
;; (deps LexerMain.exe) (deps LexerMain.exe)
;; (action (copy LexerMain.exe Lexer.exe)) (action (copy LexerMain.exe Lexer.exe))
;; (mode promote-until-clean)) (mode promote-until-clean))
(rule
(targets dot_git_is_dir)
(deps check_dot_git_is_dir.sh)
(action (run "sh" "check_dot_git_is_dir.sh")))
(rule
(targets .git_main_dir)
(deps dot_git_is_dir check_dot_git_is_dir.sh)
(action
(progn (run "sh" "check_dot_git_is_dir.sh")
(run "sh" "-c" "if \"$(cat dot_git_is_dir)\" = true; then printf %s '../../.git' > .git_main_dir; else cat ../../.git | sed -e 's/^gitdir: //' | sed -e 's|$|/../..|' > .git_main_dir; fi"))))
(rule
(targets .git_worktree_dir)
(deps dot_git_is_dir check_dot_git_is_dir.sh)
(action
(progn (run "sh" "check_dot_git_is_dir.sh")
(run "sh" "-c" "if \"$(cat dot_git_is_dir)\" = true; then printf %s '../../.git' > .git_worktree_dir; else cat ../../.git | sed -e 's/^gitdir: //' > .git_worktree_dir; fi"))))
(rule
(targets .gitHEAD)
(deps .git_main_dir .git_worktree_dir check_dot_git_is_dir.sh)
(action
(progn (run "sh" "check_dot_git_is_dir.sh")
(run "sh" "-c" "ln -s \"$(cat .git_worktree_dir)/HEAD\" .gitHEAD"))))
(rule
(targets Version.gitHEAD)
(deps .gitHEAD check_dot_git_is_dir.sh)
(action
(progn (run "sh" "check_dot_git_is_dir.sh")
(run "sh" "-c" "if git symbolic-ref HEAD >/dev/null 2>&1; then ln -s \"$(cat .git_main_dir)/$(git symbolic-ref HEAD)\" Version.gitHEAD; else ln -s \"$(cat .git_worktree_dir)/HEAD\" Version.gitHEAD; fi"))))
(rule
(targets Version.ml)
(deps Version.gitHEAD check_dot_git_is_dir.sh)
(action
(progn (run "sh" "check_dot_git_is_dir.sh")
(run "sh" "-c" "printf 'let version = \"%s\"'\\\\n \"$(git describe --always --dirty --abbrev=0)\" > Version.ml")))
(mode promote-until-clean))

View File

@ -2,9 +2,9 @@ opam-version : "2.0"
version : "1.0" version : "1.0"
maintainer : "gabriel.alfour@gmail.com" maintainer : "gabriel.alfour@gmail.com"
authors : [ "Galfour" ] authors : [ "Galfour" ]
homepage : "https://gitlab.com/gabriel.alfour/tezos" homepage : "https://gitlab.com/gabriel.alfour/ligo-parser"
bug-reports : "https://gitlab.com/gabriel.alfour/tezos/issues" bug-reports : "https://gitlab.com/gabriel.alfour/ligo-parser/issues"
dev-repo : "git+https://gitlab.com/gabriel.alfour/tezos.git" dev-repo : "git+https://gitlab.com/gabriel.alfour/ligo-parser.git"
license : "MIT" license : "MIT"
depends : [ "dune" "menhir" "hex" "zarith" "getopt" "uutf" ] depends : [ "dune" "menhir" "hex" "zarith" "getopt" "uutf" ]
@ -15,5 +15,5 @@ build : [
] ]
url { url {
src: "https://gitlab.com/gabriel.alfour/tezos/-/archive/master/tezos.tar.gz" src: "https://gitlab.com/gabriel.alfour/ligo-parser/-/archive/master/ligo-parser.tar.gz"
} }

View File

@ -2,13 +2,13 @@ open Ligo_parser
module Parser = Parser module Parser = Parser
module Lexer = Lexer module Lexer = Lexer
module CST = AST module AST_Raw = AST
module AST = AST2 module AST_Simplified = Ast_simplified
module Typed = Typed module AST_Typed = Ast_typed
module Mini_c = Mini_c module Mini_c = Mini_c
open Ligo_helpers.Trace open Ligo_helpers.Trace
let parse_file (source:string) : CST.t result = let parse_file (source:string) : AST_Raw.t result =
let channel = open_in source in let channel = open_in source in
let lexbuf = Lexing.from_channel channel in let lexbuf = Lexing.from_channel channel in
let Lexer.{read ; _} = let Lexer.{read ; _} =
@ -25,10 +25,10 @@ let parse_file (source:string) : CST.t result =
simple_error str simple_error str
) )
| _ -> simple_error "unrecognized parse_ error" | _ -> simple_error "unrecognized parse_ error"
) @@ (fun () -> Parser.program read lexbuf) >>? fun program_cst -> ) @@ (fun () -> Parser.contract read lexbuf) >>? fun program_cst ->
ok program_cst ok program_cst
let parse (s:string) : CST.t result = let parse (s:string) : AST_Raw.t result =
let lexbuf = Lexing.from_string s in let lexbuf = Lexing.from_string s in
let Lexer.{read ; _} = let Lexer.{read ; _} =
Lexer.open_token_stream None in Lexer.open_token_stream None in
@ -44,10 +44,12 @@ let parse (s:string) : CST.t result =
simple_error str simple_error str
) )
| _ -> simple_error "unrecognized parse_ error" | _ -> simple_error "unrecognized parse_ error"
) @@ (fun () -> Parser.program read lexbuf) >>? fun program_cst -> ) @@ (fun () -> Parser.contract read lexbuf) >>? fun program_cst ->
ok program_cst ok program_cst
let abstract (cst:CST.t) : AST.O.ast result = ok @@ AST.s_ast cst let abstract (p:AST_Raw.t) : AST_Simplified.program result = AST_Simplified.Simplify.program p
let annotate_types (ast:AST.O.ast) = ok @@ Typed.annotate ast let annotate_types (p:AST_Simplified.program) : AST_Typed.program result = Typer.type_program p
let transpile (p:AST_Typed.program) : Mini_c.program result = Transpiler.translate_program p
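These stages chain into the obvious front-end pipeline; a sketch of that composition over the stdlib [result] type with stubbed stages (the stub names and the [>>?] definition here are placeholders, not the [Ligo_helpers.Trace] interface):

(* Sketch: composing the front-end stages with result-typed stubs. *)
type raw = Raw
type simplified = Simplified
type typed = Typed
type mini_c = MiniC

let parse_file (_source : string) : (raw, string) result = Ok Raw
let abstract (_ : raw) : (simplified, string) result = Ok Simplified
let annotate_types (_ : simplified) : (typed, string) result = Ok Typed
let transpile (_ : typed) : (mini_c, string) result = Ok MiniC

let ( >>? ) r f = Result.bind r f

let compile source =
  parse_file source  >>? fun raw ->
  abstract raw       >>? fun ast ->
  annotate_types ast >>? fun typed ->
  transpile typed

let () =
  match compile "contract.ligo" with
    Ok MiniC -> print_endline "pipeline reached Mini_c"
  | Error msg -> prerr_endline msg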

File diff suppressed because it is too large

210
src/ligo/transpiler.ml Normal file
View File

@ -0,0 +1,210 @@
open! Ligo_helpers.Trace
open Mini_c
module AST = Ast_typed
let list_of_map m = List.rev @@ Ligo_helpers.X_map.String.fold (fun _ v prev -> v :: prev) m []
let kv_list_of_map m = List.rev @@ Ligo_helpers.X_map.String.fold (fun k v prev -> (k, v) :: prev) m []
let rec translate_type (t:AST.type_value) : type_value result =
match t with
| Type_constant ("bool", []) -> ok (`Base Bool)
| Type_constant ("int", []) -> ok (`Base Int)
| Type_constant ("string", []) -> ok (`Base String)
| Type_sum m ->
let node = Append_tree.of_list @@ list_of_map m in
let aux a b : type_value result =
let%bind a = a in
let%bind b = b in
ok (`Or (a, b))
in
Append_tree.fold_ne translate_type aux node
| Type_record m ->
let node = Append_tree.of_list @@ list_of_map m in
let aux a b : type_value result =
let%bind a = a in
let%bind b = b in
ok (`Pair (a, b))
in
Append_tree.fold_ne translate_type aux node
| Type_tuple lst ->
let node = Append_tree.of_list lst in
let aux a b : type_value result =
let%bind a = a in
let%bind b = b in
ok (`Pair (a, b))
in
Append_tree.fold_ne translate_type aux node
| _ -> simple_fail "todo"
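Sum and record types are flattened into nested [`Or] and [`Pair] nodes by folding over an [Append_tree]; the sketch below uses a plain right-nested fold instead, so the exact nesting differs from the balanced tree, and it also illustrates how the Constructor case further below injects a payload into its [`Or] slot with LEFT/RIGHT wrappers (types and names here are simplified placeholders):

(* Sketch: the nested encodings used by the transpiler, right-nested for
   simplicity. *)
type type_value =
  [ `Base of string
  | `Pair of type_value * type_value
  | `Or of type_value * type_value ]

let rec nest (node : type_value -> type_value -> type_value)
    : type_value list -> type_value = function
  [] -> invalid_arg "empty type list"
| [t] -> t
| t :: rest -> node t (nest node rest)

let pairs_of = nest (fun a b -> `Pair (a, b))   (* records and tuples *)
let ors_of   = nest (fun a b -> `Or (a, b))     (* sum types *)

(* Injecting a value into its slot of a right-nested Or encoding. *)
type expr = Value of string | Predicate of string * expr list

let rec inject constructors name value =
  match constructors with
  | [] -> invalid_arg "unknown constructor"
  | [c] when c = name -> value
  | [_] -> invalid_arg "unknown constructor"
  | c :: _ when c = name -> Predicate ("LEFT", [value])
  | _ :: rest -> Predicate ("RIGHT", [inject rest name value])

let () =
  assert (pairs_of [`Base "int"; `Base "string"; `Base "bool"]
          = `Pair (`Base "int", `Pair (`Base "string", `Base "bool")));
  (* In the variant A | B | C, the payload of B ends up as RIGHT (LEFT v). *)
  assert (inject ["A"; "B"; "C"] "B" (Value "v")
          = Predicate ("RIGHT", [Predicate ("LEFT", [Value "v"])]))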
let rec translate_block env (b:AST.block) : block result =
let env' = Environment.extend env in
let%bind instructions = bind_list @@ List.map (translate_instruction env) b in
ok (instructions, env')
and translate_instruction (env:Environment.t) (i:AST.instruction) : statement result =
match i with
| Assignment {name;annotated_expression} ->
let%bind expression = translate_annotated_expression env annotated_expression in
ok @@ (Assignment (name, expression), env)
| Matching (expr, Match_bool {match_true ; match_false}) ->
let%bind expr' = translate_annotated_expression env expr in
let%bind true_branch = translate_block env match_true in
let%bind false_branch = translate_block env match_false in
ok @@ (Cond (expr', true_branch, false_branch), env)
| Loop (expr, body) ->
let%bind expr' = translate_annotated_expression env expr in
let%bind body' = translate_block env body in
ok @@ (While (expr', body'), env)
| _ -> simple_fail "todo"
and translate_annotated_expression (env:Environment.t) (ae:AST.annotated_expression) : expression result =
let%bind tv = translate_type ae.type_annotation in
match ae.expression with
| Literal (Bool b) -> ok (Literal (`Bool b), tv, env)
| Literal (Int n) -> ok (Literal (`Int n), tv, env)
| Literal (Nat n) -> ok (Literal (`Nat n), tv, env)
| Literal (Bytes s) -> ok (Literal (`Bytes s), tv, env)
| Literal (String s) -> ok (Literal (`String s), tv, env)
| Variable name -> ok (Var name, tv, env)
| Constructor (m, param) ->
let%bind (param'_expr, param'_tv, _) = translate_annotated_expression env ae in
let%bind map_tv = AST.get_t_sum ae.type_annotation in
let node_tv = Append_tree.of_list @@ kv_list_of_map map_tv in
let%bind ae' =
let leaf (k, tv) : (expression' option * type_value) result =
if k = m then (
let%bind _ =
trace (simple_error "constructor parameter doesn't have expected type (shouldn't happen here)")
@@ AST.type_value_eq (tv, param.type_annotation) in
ok (Some (param'_expr), param'_tv)
) else (
let%bind tv = translate_type tv in
ok (None, tv)
) in
let node a b : (expression' option * type_value) result =
let%bind a = a in
let%bind b = b in
match (a, b) with
| (None, a), (None, b) -> ok (None, `Or (a, b))
| (Some _, _), (Some _, _) -> simple_fail "several identical constructors in the same variant (shouldn't happen here)"
| (Some v, a), (None, b) -> ok (Some (Predicate ("LEFT", [v, a, env])), `Or (a, b))
| (None, a), (Some v, b) -> ok (Some (Predicate ("RIGHT", [v, b, env])), `Or (a, b))
in
let%bind (ae_opt, tv) = Append_tree.fold_ne leaf node node_tv in
let%bind ae =
trace_option (simple_error "constructor doesn't exist in claimed type (shouldn't happen here)")
ae_opt in
ok (ae, tv, env) in
ok ae'
| Tuple lst ->
let node = Append_tree.of_list lst in
let aux (a:expression result) (b:expression result) : expression result =
let%bind (_, a_ty, _) as a = a in
let%bind (_, b_ty, _) as b = b in
ok (Predicate ("PAIR", [a; b]), `Pair(a_ty, b_ty), env)
in
Append_tree.fold_ne (translate_annotated_expression env) aux node
| Tuple_accessor (tpl, ind) ->
let%bind (tpl'_expr, _, _) = translate_annotated_expression env tpl in
let%bind tpl_tv = AST.get_t_tuple ae.type_annotation in
let node_tv = Append_tree.of_list @@ List.mapi (fun i a -> (a, i)) tpl_tv in
let%bind ae' =
let leaf (tv, i) : (expression' option * type_value) result =
let%bind tv = translate_type tv in
if i = ind then (
ok (Some (tpl'_expr), tv)
) else (
ok (None, tv)
) in
let node a b : (expression' option * type_value) result =
let%bind a = a in
let%bind b = b in
match (a, b) with
| (None, a), (None, b) -> ok (None, `Pair (a, b))
| (Some _, _), (Some _, _) -> simple_fail "several identical indexes in the same tuple (shouldn't happen here)"
| (Some v, a), (None, b) -> ok (Some (Predicate ("CAR", [v, a, env])), `Pair (a, b))
| (None, a), (Some v, b) -> ok (Some (Predicate ("CDR", [v, b, env])), `Pair (a, b))
in
let%bind (ae_opt, tv) = Append_tree.fold_ne leaf node node_tv in
let%bind ae = trace_option (simple_error "bad index in tuple (shouldn't happen here)")
ae_opt in
ok (ae, tv, env) in
ok ae'
| Record m ->
let node = Append_tree.of_list @@ list_of_map m in
let aux a b : expression result =
let%bind (_, a_ty, _) as a = a in
let%bind (_, b_ty, _) as b = b in
ok (Predicate ("PAIR", [a; b]), `Pair(a_ty, b_ty), env)
in
Append_tree.fold_ne (translate_annotated_expression env) aux node
| Record_accessor (r, key) ->
let%bind (r'_expr, _, _) = translate_annotated_expression env r in
let%bind r_tv = AST.get_t_record ae.type_annotation in
let node_tv = Append_tree.of_list @@ kv_list_of_map r_tv in
let%bind ae' =
let leaf (key', tv) : (expression' option * type_value) result =
let%bind tv = translate_type tv in
if key = key' then (
ok (Some (r'_expr), tv)
) else (
ok (None, tv)
) in
let node a b : (expression' option * type_value) result =
let%bind a = a in
let%bind b = b in
match (a, b) with
| (None, a), (None, b) -> ok (None, `Pair (a, b))
| (Some _, _), (Some _, _) -> simple_fail "several identical keys in the same record (shouldn't happen here)"
| (Some v, a), (None, b) -> ok (Some (Predicate ("CAR", [v, a, env])), `Pair (a, b))
| (None, a), (Some v, b) -> ok (Some (Predicate ("CDR", [v, b, env])), `Pair (a, b))
in
let%bind (ae_opt, tv) = Append_tree.fold_ne leaf node node_tv in
let%bind ae = trace_option (simple_error "bad key in record (shouldn't happen here)")
ae_opt in
ok (ae, tv, env) in
ok ae'
| Constant (name, lst) ->
let%bind lst' = bind_list @@ List.map (translate_annotated_expression env) lst in
ok (Predicate (name, lst'), tv, env)
| Lambda { binder ; input_type ; output_type ; body ; result } ->
(* Try to type it in an empty env, if it succeeds, transpiles it as a quote value, else, as a closure expression. *)
let%bind empty_env =
let%bind input = translate_type input_type in
ok Environment.(add (binder, input) empty) in
match to_option (translate_block empty_env body), to_option (translate_annotated_expression empty_env result) with
| Some body, Some result ->
let capture_type = No_capture in
let%bind input = translate_type input_type in
let%bind output = translate_type output_type in
let content = {binder;input;output;body;result;capture_type} in
ok (Literal (`Function {capture=None;content}), tv, env)
| _ ->
(* Shallow capture. Capture the whole environment. Extend it with a new scope. Append it the input. *)
let%bind input = translate_type input_type in
let sub_env = Environment.extend env in
let full_env = Environment.add (binder, input) sub_env in
let%bind (_, post_env) as body = translate_block full_env body in
let%bind result = translate_annotated_expression post_env result in
let capture_type = Shallow_capture sub_env in
let input = Environment.to_mini_c_type full_env in
let%bind output = translate_type output_type in
let content = {binder;input;output;body;result;capture_type} in
ok (Function_expression content, tv, env)
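(* The Lambda case above picks between two strategies: if the body and result
   already translate in an environment that contains only the binder, nothing
   is captured and the function is emitted as a quoted [`Function] literal;
   otherwise the current environment is captured shallowly and the lambda
   becomes a closure ([Function_expression] with [Shallow_capture]).
   Illustrative only: a lambda such as [fun x -> x + 1] would take the quote
   path, while one that reads a variable of the enclosing scope fails to
   translate in the binder-only environment and takes the closure path. *)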
let translate_declaration env (d:AST.declaration) : toplevel_statement result =
match d with
| Constant_declaration {name;annotated_expression} ->
let%bind ((_, tv, _) as expression) = translate_annotated_expression env annotated_expression in
let env' = Environment.add (name, tv) env in
ok @@ ((name, expression), env')
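(* A program is compiled by folding over its declarations: each constant
   declaration is translated in the environment accumulated so far and then
   extends that environment with the declared name and its Mini_c type, so
   later declarations can refer to earlier ones. *)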
let translate_program (lst:AST.program) : program result =
let aux (prev:(toplevel_statement list * Environment.t) result) cur =
let%bind (tl, env) = prev in
let%bind ((_, env') as cur') = translate_declaration env cur in
ok (cur' :: tl, env')
in
let%bind (statements, _) = List.fold_left aux (ok ([], Environment.empty)) lst in
  ok (List.rev statements) (* the fold conses each statement on the front, so restore source order *)