commit 0975f71059
parent 8819422542

tmp

src/ligo/.old.transpiler.ml (196 lines, new file)
@ -0,0 +1,196 @@
open Mini_c
module AST = Ligo_parser.Typed.O
module SMap = Ligo_parser.Typed.SMap

module Rename = struct
open! AST

let rec rename_expr_case (src:string) (dst:string) : expr_case -> expr_case = function
| App {operator;arguments} -> App {operator = rename_operator src dst operator ; arguments = rename_exprs src dst arguments}
| Var n when n.name.name = src -> Var {n with name = {n.name with name = dst}}
| Var n -> Var n
| Constant c -> Constant c
| Record r -> Record (List.map (fun (key, expr) -> key, rename_expr src dst expr) r)
| Lambda {parameter} as l when parameter.name.name = src -> l
| Lambda ({instructions;declarations} as l) ->
Lambda {l with instructions = rename_instrs src dst instructions ; declarations = rename_declarations src dst declarations}

and rename_expr (src:string) (dst:string) (e : expr) : expr =
{ e with expr = rename_expr_case src dst e.expr }

and rename_exprs src dst exprs = List.map (rename_expr src dst) exprs

and rename_operator_case (src:string) (dst:string) : operator_case -> operator_case = function
| Function n when n.name = src -> Function {n with name = dst}
| x -> x

and rename_operator src dst (o:operator) : operator = {o with operator = rename_operator_case src dst o.operator}

and rename_var src dst (v:var_name) : var_name =
if v.name = src
then {v with name = dst}
else v

and rename_instr (src:string) (dst:string) : instr -> instr = function
| Assignment {name;value;orig} when name.name = src -> Assignment {name = {name with name = dst};value;orig}
| Assignment {name;value;orig} -> Assignment {value = rename_expr src dst value;name;orig}
| While {condition;body;orig} -> While {condition = rename_expr src dst condition;body=rename_instrs src dst body;orig}
| ForCollection {list;var;body;orig} -> ForCollection {list = rename_expr src dst list;var = rename_var src dst var;
body = rename_instrs src dst body;orig}
| Match ({expr;cases} as a) -> Match {a with expr = rename_expr src dst expr ; cases = rename_match_cases src dst cases}
| ProcedureCall {expr;orig} -> ProcedureCall {expr = rename_expr src dst expr;orig}
| Fail {expr;orig} -> Fail {expr = rename_expr src dst expr;orig}

and rename_instrs src dst : instr list -> instr list = List.map (rename_instr src dst)

and rename_match_cases (src:string) (dst:string) (m:(_ * instr list) list) =
List.map (fun (x, y) -> x, rename_instrs src dst y) m

and rename_declaration (src:string) (dst:string) ({var} as d: decl) : decl =
if var.name.name = src
then {d with var = {var with name = {var.name with name = dst}}}
else d

and rename_declarations (src:string) (dst:string) (decls:decl list) =
List.map (rename_declaration src dst) decls
end

let list_of_map m = List.rev @@ SMap.fold (fun _ v prev -> v :: prev) m []
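(* Quick illustration (not in the original commit; it assumes SMap is a
   standard string-keyed Map, whose [fold] visits bindings in increasing key
   order, so [list_of_map] returns the values sorted by key): *)
let _example_list_of_map () =
  let m = SMap.add "b" 2 (SMap.add "a" 1 SMap.empty) in
  assert (list_of_map m = [1; 2])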
let rec translate_type : AST.type_expr -> type_value result = fun {type_expr} ->
match type_expr with
| Unit -> ok (`Base Unit)
| Int -> ok (`Base Int)
| String -> ok (`Base String)
| Bool -> ok (`Base Bool)
| Sum m ->
let node = Append_tree.of_list @@ List.map snd @@ list_of_map m in
let aux a b : type_value result =
let%bind a = a in
let%bind b = b in
ok (`Or (a, b))
in
Append_tree.fold_ne translate_type aux node
| Record r ->
let node = Append_tree.of_list @@ List.map snd @@ list_of_map r in
let aux a b : type_value result =
let%bind a = a in
let%bind b = b in
ok (`Pair (a, b))
in
Append_tree.fold_ne translate_type aux node
| Ref t -> translate_type t
| Function {arg;ret} ->
let%bind arg = translate_type arg in
let%bind ret = translate_type ret in
ok (`Function(arg, ret))
| TypeApp _ -> simple_fail "No type application"
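(* Illustrative sketch (not part of the original file): with the Append_tree
   fold above, a three-case sum type such as [A of int | B of string | C of unit]
   is lowered to nested binary or-types; the exact nesting depends on how
   Append_tree.of_list balances its input, e.g. *)
let _example_sum_lowering : type_value =
  `Or (`Base Int, `Or (`Base String, `Base Unit))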
let translate_constant : AST.constant -> value result = function
| Unit -> ok `Unit
| String s -> ok (`String s)
| Int n -> ok (`Int (Z.to_int n))
| False -> ok (`Bool false)
| True -> ok (`Bool true)
| _ -> simple_fail ""

let rec translate_lambda : AST.lambda -> anon_function result =
fun {declarations;parameter;instructions;result} ->
let ({name;ty}:AST.typed_var) = parameter in
let%bind input_ty = translate_type ty in
let%bind output_ty = translate_type result.ty in
let%bind result = translate_expr result in
let%bind (declaration_statements : statement list) = translate_declarations declarations in
let%bind (instruction_statements : statement list) = translate_instructions instructions in
let body = declaration_statements @ instruction_statements in
ok {content={binder=name.name;input=input_ty;output=output_ty;body;result} ; capture = No_capture}

and translate_expr' : AST.expr_case -> expression' result = function
| Var {name} -> ok (Var name.name)
| Constant cst ->
let%bind value = translate_constant cst in
ok (Literal value)
| Lambda _ -> simple_fail "Mini_c doesn't deal with lambda in expressions yet"
| _ -> simple_fail ""

and translate_expr env : AST.expr -> expression result = fun {expr;ty} ->
let%bind expr = translate_expr' expr in
let%bind ty = translate_type ty in
ok (expr, ty, env)

and translate_declaration : AST.decl -> statement result = fun {var;value} ->
let%bind expr = translate_expr value in
ok (Assignment(Variable(var.name.name, expr)))

and translate_declarations : AST.decl list -> statement list result = fun declarations ->
bind_list @@ List.map translate_declaration declarations

and translate_match (expr:AST.expr) (cases: (AST.pattern * AST.instr list) list) : statement result =
match cases with
| [(AST.PTrue, instrs_true) ; (AST.PFalse, instrs_false) ] ->
let%bind cond = translate_expr expr in
let%bind b_true = translate_instructions instrs_true in
let%bind b_false = translate_instructions instrs_false in
ok (Cond (cond, b_true, b_false))
| [(AST.PFalse, instrs_false) ; (AST.PTrue, instrs_true) ] ->
let%bind cond = translate_expr expr in
let%bind b_true = translate_instructions instrs_true in
let%bind b_false = translate_instructions instrs_false in
ok (Cond (cond, b_true, b_false))
| _ -> simple_fail "unrecognized pattern"
and translate_instruction : AST.instr -> statement result = function
| Assignment {name ; value} ->
let%bind expr = translate_expr value in
ok (Assignment (Variable(name.name, expr)))
| While {condition ; body} ->
let%bind block = translate_instructions body in
let%bind cond = translate_expr condition in
ok (While (cond, block))
| ForCollection _ -> simple_fail "We don't deal with for collection yet"
| Match {expr;cases} -> translate_match expr cases
| Fail _ -> simple_fail "Fail has to be added to Mini_C"
| ProcedureCall _ -> simple_fail "Drop Unit has to be added to Mini_C"

and translate_instructions : AST.instr list -> statement list result = fun instrs ->
bind_list @@ List.map translate_instruction instrs

let translate_program : AST.ast -> block result = fun {declarations} ->
translate_declarations declarations
let rec to_mini_c_value' : (AST.expr_case * AST.type_expr) -> value result = function
| Constant c, _ -> translate_constant c
| App {arguments;operator = {operator = Constructor c ; ty = {type_expr = Sum lst}}}, _ ->
let node = Append_tree.of_list @@ List.map fst @@ list_of_map lst in
let%bind lst =
trace_option (simple_error "Not constructor of variant type") @@
Append_tree.exists_path (fun (x:AST.name_and_region) -> x.name = c.name) node in
let arg = List.hd arguments in
let%bind arg = to_mini_c_value arg in
let ors = List.fold_left (fun b a -> if a then `Right b else `Left b) arg (List.rev lst) in
ok ors
| App _, _ -> simple_fail "Applications aren't values"
| Record lst, _ ->
let node = Append_tree.of_list @@ List.map snd lst in
let aux a b =
let%bind a = a in
let%bind b = b in
ok (`Pair (a, b))
in
Append_tree.fold_ne to_mini_c_value aux node
| Lambda _, _ -> simple_fail "Lambdas aren't values yet"
| Var _, _ -> simple_fail "Vars aren't values yet"

and to_mini_c_value : AST.expr -> value result = fun {expr;ty} ->
to_mini_c_value' (expr, ty)

let ghost expr ty : AST.expr = {expr;ty;orig=`TODO}

let of_mini_c_value ({type_expr} as ty, v : AST.type_expr * value) : AST.expr result = match (type_expr, v) with
| String, `String s -> ok @@ ghost (Constant (String s)) ty
| Bool, `Bool b -> ok @@ ghost (Constant (if b then True else False)) ty
| Unit, `Unit -> ok @@ ghost (Constant (Unit)) ty
| Int, `Int n -> ok @@ ghost (Constant (Int (Z.of_int n))) ty
| Function _, _ -> simple_fail "Functions aren't retrieved from Mini_C yet"
| _ -> simple_fail "of_mini_c_value error"
@ -41,18 +41,21 @@ and type_expression =
|
||||
| Type_variable of type_name
|
||||
| Type_constant of type_name * te list
|
||||
|
||||
and expression =
|
||||
(* Base *)
|
||||
| Literal of literal
|
||||
| Constant of name * ae list (* For language constants, like (Cons hd tl) or (plus i j) *)
|
||||
| Variable of name
|
||||
| Lambda of {
|
||||
and lambda = {
|
||||
binder: name ;
|
||||
input_type: type_expression ;
|
||||
output_type: type_expression ;
|
||||
result: ae ;
|
||||
body: block ;
|
||||
}
|
||||
|
||||
and expression =
|
||||
(* Base *)
|
||||
| Literal of literal
|
||||
| Constant of name * ae list (* For language constants, like (Cons hd tl) or (plus i j) *)
|
||||
| Variable of name
|
||||
| Lambda of lambda
|
||||
| Application of ae * ae
|
||||
(* Tuple *)
|
||||
| Tuple of ae list
|
||||
| Tuple_accessor of ae * int (* Access n'th tuple's element *)
|
||||
@ -63,6 +66,7 @@ and expression =
|
||||
| Record_accessor of ae * string
|
||||
|
||||
and literal =
|
||||
| Unit
|
||||
| Bool of bool
|
||||
| Number of int
|
||||
| String of string
|
||||
@ -92,3 +96,126 @@ and matching =
|
||||
match_some : name * b ;
|
||||
}
|
||||
| Match_tuple of (name * b) list
|
||||
|
||||
let ae expression = {expression ; type_annotation = None}
|
||||
|
||||
open Ligo_helpers.Trace
|
||||
|
||||
module Simplify = struct
|
||||
module Raw = Ligo_parser.AST
|
||||
|
||||
let nseq_to_list (hd, tl) = hd :: tl
|
||||
let npseq_to_list (hd, tl) = hd :: (List.map snd tl)
|
||||
|
||||
let rec simpl_type_expression (t:Raw.type_expr) : type_expression result =
|
||||
match t with
|
||||
| TPar x -> simpl_type_expression x.value.inside
|
||||
| TAlias v -> ok @@ Type_variable v.value
|
||||
| TApp x ->
|
||||
let (name, tuple) = x.value in
|
||||
let%bind lst = bind_list
|
||||
@@ List.map simpl_type_expression
|
||||
@@ npseq_to_list tuple.value.inside in
|
||||
ok @@ Type_constant (name.value, lst)
|
||||
| TProd p ->
|
||||
let%bind lst = bind_list
|
||||
@@ List.map simpl_type_expression
|
||||
@@ npseq_to_list p.value in
|
||||
ok @@ Type_tuple lst
|
||||
| TRecord r ->
|
||||
let aux = fun (x, y) -> let%bind y = simpl_type_expression y in ok (x, y) in
|
||||
let%bind lst = bind_list
|
||||
@@ List.map aux
|
||||
@@ List.map (fun (x:Raw.field_decl Raw.reg) -> (x.value.field_name.value, x.value.field_type))
|
||||
@@ npseq_to_list r.value.fields in
|
||||
let m = List.fold_left (fun m (x, y) -> SMap.add x y m) SMap.empty lst in
|
||||
ok @@ Type_record m
|
||||
| TSum s ->
|
||||
let aux (v:Raw.variant Raw.reg) =
|
||||
let%bind te = simpl_list_type_expression
|
||||
@@ npseq_to_list v.value.product.value in
|
||||
ok (v.value.constr.value, te)
|
||||
in
|
||||
let%bind lst = bind_list
|
||||
@@ List.map aux
|
||||
@@ npseq_to_list s.value in
|
||||
let m = List.fold_left (fun m (x, y) -> SMap.add x y m) SMap.empty lst in
|
||||
ok @@ Type_sum m
|
||||
|
||||
and simpl_list_type_expression (lst:Raw.type_expr list) : type_expression result =
|
||||
match lst with
|
||||
| [] -> assert false
|
||||
| [hd] -> simpl_type_expression hd
|
||||
| lst ->
|
||||
let%bind lst = bind_list @@ List.map simpl_type_expression lst in
|
||||
ok @@ Type_tuple lst
|
||||
|
||||
let rec simpl_expression (t:Raw.expr) : ae result =
|
||||
match t with
|
||||
| EVar c -> ok @@ ae @@ Variable c.value
|
||||
| ECall x ->
|
||||
let (name, args) = x.value in
|
||||
let f = name.value in
|
||||
let%bind arg = simpl_list_expression
|
||||
@@ npseq_to_list args.value.inside in
|
||||
ok @@ ae @@ Application (ae @@ Variable f, arg)
|
||||
| EPar x -> simpl_expression x.value.inside
|
||||
| EUnit _ -> ok @@ ae @@ Literal Unit
|
||||
| EBytes x -> ok @@ ae @@ Literal (Bytes (fst x.value))
|
||||
| ETuple tpl ->
|
||||
simpl_list_expression
|
||||
@@ npseq_to_list tpl.value.inside
|
||||
| EConstr (ConstrApp c) ->
|
||||
let (c, args) = c.value in
|
||||
let%bind arg =
|
||||
simpl_list_expression
|
||||
@@ npseq_to_list args.value.inside in
|
||||
ok @@ ae @@ Constructor (c.value, arg)
|
||||
| EArith (Add c) ->
|
||||
let%bind (a, b) = simpl_binop c.value in
|
||||
ok @@ ae @@ Constant ("ADD", [a;b])
|
||||
| EArith (Int n) ->
|
||||
let n = Z.to_int @@ snd @@ n.value in
|
||||
ok @@ ae @@ Literal (Number n)
|
||||
| EArith _ -> simple_fail "arith: not supported yet"
|
||||
| EString (String s) ->
|
||||
ok @@ ae @@ Literal (String s.value)
|
||||
| EString _ -> simple_fail "string: not supported yet"
|
||||
| _ -> simple_fail "todo"
|
||||
|
||||
and simpl_binop (t:_ Raw.bin_op) : (ae * ae) result =
|
||||
let%bind a = simpl_expression t.arg1 in
|
||||
let%bind b = simpl_expression t.arg2 in
|
||||
ok (a, b)
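(* Worked example (illustrative, not in the original commit): a source
   expression such as [1 + 2] arrives as [EArith (Add ...)] and, via
   [simpl_binop], is simplified to
     ae (Constant ("ADD", [ae (Literal (Number 1)); ae (Literal (Number 2))]))
   i.e. arithmetic operators become named language constants applied to their
   simplified operands. *)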
and simpl_list_expression (lst:Raw.expr list) : ae result =
|
||||
match lst with
|
||||
| [] -> ok @@ ae @@ Literal Unit
|
||||
| [hd] -> simpl_expression hd
|
||||
| lst ->
|
||||
let%bind lst = bind_list @@ List.map simpl_expression lst in
|
||||
ok @@ ae @@ Tuple lst
|
||||
|
||||
and simpl_lambda (t:Raw.lambda_decl) : lambda result = simple_fail "todo"
|
||||
|
||||
and simpl_declaration (t:Raw.declaration) : declaration result =
|
||||
let open! Raw in
|
||||
match t with
|
||||
| TypeDecl x ->
|
||||
let {name;type_expr} : Raw.type_decl = x.value in
|
||||
let%bind type_expression = simpl_type_expression type_expr in
|
||||
ok @@ Type_declaration {type_name=name.value;type_expression}
|
||||
| ConstDecl x ->
|
||||
let {name;const_type;init} = x.value in
|
||||
let%bind expression = simpl_expression init in
|
||||
let%bind t = simpl_type_expression const_type in
|
||||
let type_annotation = Some t in
|
||||
ok @@ Constant_declaration {name=name.value;annotated_expression={expression with type_annotation}}
|
||||
| LambdaDecl (FunDecl x) ->
|
||||
let {name;param;ret_type;local_decls;block;return} : fun_decl = x.value in
|
||||
simple_fail "todo"
|
||||
| _ -> simple_fail "todo"
|
||||
|
||||
let simpl_program (t:Raw.ast) : program result =
|
||||
bind_list @@ List.map simpl_declaration @@ nseq_to_list t.decl
|
||||
end
|
||||
|
@ -38,6 +38,10 @@ let trace err = function
| Ok _ as o -> o
| Errors errs -> Errors (err :: errs)

let to_option = function
| Ok o -> Some o
| Errors _ -> None

let trace_option error = function
| None -> fail error
| Some s -> ok s
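(* Usage sketch (illustrative, not part of the commit): [trace_option] lifts an
   optional value into the result type, so it composes with [let%bind], e.g.

     let%bind v = trace_option (simple_error "not found") (find_opt key map) in
     ...

   where [simple_error] and [find_opt] stand in for whatever error constructor
   and lookup function are actually available. *)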
File diff suppressed because it is too large
@ -1,4 +1,4 @@
|
||||
(* Abstract Syntax Tree (AST) for Ligo *)
|
||||
(* Abstract Syntax Tree (AST) for LIGO *)
|
||||
|
||||
[@@@warning "-30"]
|
||||
|
||||
@ -21,9 +21,10 @@ val nseq_to_region : ('a -> Region.t) -> 'a nseq -> Region.t
|
||||
val nsepseq_to_region : ('a -> Region.t) -> ('a,'sep) nsepseq -> Region.t
|
||||
val sepseq_to_region : ('a -> Region.t) -> ('a,'sep) sepseq -> Region.t
|
||||
|
||||
(* Keywords of Ligo *)
|
||||
(* Keywords of LIGO *)
|
||||
|
||||
type kwd_begin = Region.t
|
||||
type kwd_case = Region.t
|
||||
type kwd_const = Region.t
|
||||
type kwd_down = Region.t
|
||||
type kwd_else = Region.t
|
||||
@ -35,14 +36,14 @@ type kwd_function = Region.t
|
||||
type kwd_if = Region.t
|
||||
type kwd_in = Region.t
|
||||
type kwd_is = Region.t
|
||||
type kwd_match = Region.t
|
||||
type kwd_map = Region.t
|
||||
type kwd_mod = Region.t
|
||||
type kwd_not = Region.t
|
||||
type kwd_null = Region.t
|
||||
type kwd_of = Region.t
|
||||
type kwd_operations = Region.t
|
||||
type kwd_patch = Region.t
|
||||
type kwd_procedure = Region.t
|
||||
type kwd_record = Region.t
|
||||
type kwd_skip = Region.t
|
||||
type kwd_step = Region.t
|
||||
type kwd_storage = Region.t
|
||||
type kwd_then = Region.t
|
||||
@ -62,34 +63,34 @@ type c_Unit = Region.t
|
||||
|
||||
(* Symbols *)
|
||||
|
||||
type semi = Region.t
|
||||
type comma = Region.t
|
||||
type lpar = Region.t
|
||||
type rpar = Region.t
|
||||
type lbrace = Region.t
|
||||
type rbrace = Region.t
|
||||
type lbracket = Region.t
|
||||
type rbracket = Region.t
|
||||
type cons = Region.t
|
||||
type vbar = Region.t
|
||||
type arrow = Region.t
|
||||
type ass = Region.t
|
||||
type equal = Region.t
|
||||
type colon = Region.t
|
||||
type bool_or = Region.t
|
||||
type bool_and = Region.t
|
||||
type lt = Region.t
|
||||
type leq = Region.t
|
||||
type gt = Region.t
|
||||
type geq = Region.t
|
||||
type neq = Region.t
|
||||
type plus = Region.t
|
||||
type minus = Region.t
|
||||
type slash = Region.t
|
||||
type times = Region.t
|
||||
type dot = Region.t
|
||||
type wild = Region.t
|
||||
type cat = Region.t
|
||||
type semi = Region.t (* ";" *)
|
||||
type comma = Region.t (* "," *)
|
||||
type lpar = Region.t (* "(" *)
|
||||
type rpar = Region.t (* ")" *)
|
||||
type lbrace = Region.t (* "{" *)
|
||||
type rbrace = Region.t (* "}" *)
|
||||
type lbracket = Region.t (* "[" *)
|
||||
type rbracket = Region.t (* "]" *)
|
||||
type cons = Region.t (* "#" *)
|
||||
type vbar = Region.t (* "|" *)
|
||||
type arrow = Region.t (* "->" *)
|
||||
type assign = Region.t (* ":=" *)
|
||||
type equal = Region.t (* "=" *)
|
||||
type colon = Region.t (* ":" *)
|
||||
type bool_or = Region.t (* "||" *)
|
||||
type bool_and = Region.t (* "&&" *)
|
||||
type lt = Region.t (* "<" *)
|
||||
type leq = Region.t (* "<=" *)
|
||||
type gt = Region.t (* ">" *)
|
||||
type geq = Region.t (* ">=" *)
|
||||
type neq = Region.t (* "=/=" *)
|
||||
type plus = Region.t (* "+" *)
|
||||
type minus = Region.t (* "-" *)
|
||||
type slash = Region.t (* "/" *)
|
||||
type times = Region.t (* "*" *)
|
||||
type dot = Region.t (* "." *)
|
||||
type wild = Region.t (* "_" *)
|
||||
type cat = Region.t (* "^" *)
|
||||
|
||||
(* Virtual tokens *)
|
||||
|
||||
@ -104,25 +105,29 @@ type field_name = string reg
|
||||
type map_name = string reg
|
||||
type constr = string reg
|
||||
|
||||
(* Comma-separated non-empty lists *)
|
||||
|
||||
type 'a csv = ('a, comma) nsepseq
|
||||
|
||||
(* Bar-separated non-empty lists *)
|
||||
|
||||
type 'a bsv = ('a, vbar) nsepseq
|
||||
|
||||
(* Parentheses *)
|
||||
|
||||
type 'a par = (lpar * 'a * rpar) reg
|
||||
type 'a par = {
|
||||
lpar : lpar;
|
||||
inside : 'a;
|
||||
rpar : rpar
|
||||
}
|
||||
|
||||
(* Brackets compounds *)
|
||||
|
||||
type 'a brackets = (lbracket * 'a * rbracket) reg
|
||||
type 'a brackets = {
|
||||
lbracket : lbracket;
|
||||
inside : 'a;
|
||||
rbracket : rbracket
|
||||
}
|
||||
|
||||
(* Braced compounds *)
|
||||
|
||||
type 'a braces = (lbrace * 'a * rbrace) reg
|
||||
type 'a braces = {
|
||||
lbrace : lbrace;
|
||||
inside : 'a;
|
||||
rbrace : rbrace
|
||||
}
|
||||
|
||||
(* The Abstract Syntax Tree *)
|
||||
|
||||
@ -136,8 +141,6 @@ and ast = t
|
||||
and declaration =
|
||||
TypeDecl of type_decl reg
|
||||
| ConstDecl of const_decl reg
|
||||
| StorageDecl of storage_decl reg
|
||||
| OpDecl of operations_decl reg
|
||||
| LambdaDecl of lambda_decl
|
||||
|
||||
and const_decl = {
|
||||
@ -150,22 +153,6 @@ and const_decl = {
|
||||
terminator : semi option
|
||||
}
|
||||
|
||||
and storage_decl = {
|
||||
kwd_storage : kwd_storage;
|
||||
name : variable;
|
||||
colon : colon;
|
||||
store_type : type_expr;
|
||||
terminator : semi option
|
||||
}
|
||||
|
||||
and operations_decl = {
|
||||
kwd_operations : kwd_operations;
|
||||
name : variable;
|
||||
colon : colon;
|
||||
op_type : type_expr;
|
||||
terminator : semi option
|
||||
}
|
||||
|
||||
(* Type declarations *)
|
||||
|
||||
and type_decl = {
|
||||
@ -177,24 +164,36 @@ and type_decl = {
|
||||
}
|
||||
|
||||
and type_expr =
|
||||
Prod of cartesian
|
||||
| Sum of (variant, vbar) nsepseq reg
|
||||
| Record of record_type
|
||||
| TypeApp of (type_name * type_tuple) reg
|
||||
| ParType of type_expr par
|
||||
TProd of cartesian
|
||||
| TSum of (variant reg, vbar) nsepseq reg
|
||||
| TRecord of record_type reg
|
||||
| TApp of (type_name * type_tuple) reg
|
||||
| TPar of type_expr par reg
|
||||
| TAlias of variable
|
||||
|
||||
and cartesian = (type_expr, times) nsepseq reg
|
||||
|
||||
and variant = (constr * kwd_of * cartesian) reg
|
||||
and variant = {
|
||||
constr : constr;
|
||||
kwd_of : kwd_of;
|
||||
product : cartesian
|
||||
}
|
||||
|
||||
and record_type = (kwd_record * field_decls * kwd_end) reg
|
||||
and record_type = {
|
||||
kwd_record : kwd_record;
|
||||
fields : field_decls;
|
||||
kwd_end : kwd_end
|
||||
}
|
||||
|
||||
and field_decls = (field_decl, semi) nsepseq
|
||||
and field_decls = (field_decl reg, semi) nsepseq
|
||||
|
||||
and field_decl = (variable * colon * type_expr) reg
|
||||
and field_decl = {
|
||||
field_name : field_name;
|
||||
colon : colon;
|
||||
field_type : type_expr
|
||||
}
|
||||
|
||||
and type_tuple = (type_name, comma) nsepseq par
|
||||
and type_tuple = (type_expr, comma) nsepseq par reg
|
||||
|
||||
(* Function and procedure declarations *)
|
||||
|
||||
@ -230,22 +229,50 @@ and proc_decl = {
|
||||
and entry_decl = {
|
||||
kwd_entrypoint : kwd_entrypoint;
|
||||
name : variable;
|
||||
param : parameters;
|
||||
param : entry_params;
|
||||
colon : colon;
|
||||
ret_type : type_expr;
|
||||
kwd_is : kwd_is;
|
||||
local_decls : local_decl list;
|
||||
block : block reg;
|
||||
kwd_with : kwd_with;
|
||||
return : expr;
|
||||
terminator : semi option
|
||||
}
|
||||
|
||||
and parameters = (param_decl, semi) nsepseq par
|
||||
and parameters = (param_decl, semi) nsepseq par reg
|
||||
|
||||
and entry_params = (entry_param_decl, semi) nsepseq par reg
|
||||
|
||||
and entry_param_decl =
|
||||
EntryConst of param_const reg
|
||||
| EntryVar of param_var reg
|
||||
| EntryStore of storage reg
|
||||
|
||||
and storage = {
|
||||
kwd_storage : kwd_storage;
|
||||
var : variable;
|
||||
colon : colon;
|
||||
storage_type : type_expr
|
||||
}
|
||||
|
||||
and param_decl =
|
||||
ParamConst of param_const
|
||||
| ParamVar of param_var
|
||||
ParamConst of param_const reg
|
||||
| ParamVar of param_var reg
|
||||
|
||||
and param_const = (kwd_const * variable * colon * type_expr) reg
|
||||
and param_const = {
|
||||
kwd_const : kwd_const;
|
||||
var : variable;
|
||||
colon : colon;
|
||||
param_type : type_expr
|
||||
}
|
||||
|
||||
and param_var = (kwd_var * variable * colon * type_expr) reg
|
||||
and param_var = {
|
||||
kwd_var : kwd_var;
|
||||
var : variable;
|
||||
colon : colon;
|
||||
param_type : type_expr
|
||||
}
|
||||
|
||||
and block = {
|
||||
opening : kwd_begin;
|
||||
@ -264,12 +291,12 @@ and var_decl = {
|
||||
name : variable;
|
||||
colon : colon;
|
||||
var_type : type_expr;
|
||||
ass : ass;
|
||||
assign : assign;
|
||||
init : expr;
|
||||
terminator : semi option
|
||||
}
|
||||
|
||||
and instructions = (instruction, semi) nsepseq reg
|
||||
and instructions = (instruction, semi) nsepseq
|
||||
|
||||
and instruction =
|
||||
Single of single_instr
|
||||
@ -277,12 +304,46 @@ and instruction =
|
||||
|
||||
and single_instr =
|
||||
Cond of conditional reg
|
||||
| Match of match_instr reg
|
||||
| Ass of ass_instr
|
||||
| Case of case_instr reg
|
||||
| Assign of assignment reg
|
||||
| Loop of loop
|
||||
| ProcCall of fun_call
|
||||
| Null of kwd_null
|
||||
| Fail of (kwd_fail * expr) reg
|
||||
| Fail of fail_instr reg
|
||||
| Skip of kwd_skip
|
||||
| RecordPatch of record_patch reg
|
||||
| MapPatch of map_patch reg
|
||||
|
||||
and map_patch = {
|
||||
kwd_patch : kwd_patch;
|
||||
path : path;
|
||||
kwd_with : kwd_with;
|
||||
map_inj : map_injection reg
|
||||
}
|
||||
|
||||
and map_injection = {
|
||||
opening : kwd_map;
|
||||
bindings : (binding reg, semi) nsepseq;
|
||||
terminator : semi option;
|
||||
close : kwd_end
|
||||
}
|
||||
|
||||
and binding = {
|
||||
source : expr;
|
||||
arrow : arrow;
|
||||
image : expr
|
||||
}
|
||||
|
||||
and record_patch = {
|
||||
kwd_patch : kwd_patch;
|
||||
path : path;
|
||||
kwd_with : kwd_with;
|
||||
record_inj : record_injection reg
|
||||
}
|
||||
|
||||
and fail_instr = {
|
||||
kwd_fail : kwd_fail;
|
||||
fail_expr : expr
|
||||
}
|
||||
|
||||
and conditional = {
|
||||
kwd_if : kwd_if;
|
||||
@ -293,26 +354,46 @@ and conditional = {
|
||||
ifnot : instruction
|
||||
}
|
||||
|
||||
and match_instr = {
|
||||
kwd_match : kwd_match;
|
||||
and case_instr = {
|
||||
kwd_case : kwd_case;
|
||||
expr : expr;
|
||||
kwd_with : kwd_with;
|
||||
kwd_of : kwd_of;
|
||||
lead_vbar : vbar option;
|
||||
cases : cases;
|
||||
kwd_end : kwd_end
|
||||
}
|
||||
|
||||
and cases = (case, vbar) nsepseq reg
|
||||
and cases = (case reg, vbar) nsepseq reg
|
||||
|
||||
and case = (pattern * arrow * instruction) reg
|
||||
and case = {
|
||||
pattern : pattern;
|
||||
arrow : arrow;
|
||||
instr : instruction
|
||||
}
|
||||
|
||||
and ass_instr = (variable * ass * expr) reg
|
||||
and assignment = {
|
||||
lhs : lhs;
|
||||
assign : assign;
|
||||
rhs : rhs;
|
||||
}
|
||||
|
||||
and lhs =
|
||||
Path of path
|
||||
| MapPath of map_lookup reg
|
||||
|
||||
and rhs =
|
||||
Expr of expr
|
||||
| NoneExpr of c_None
|
||||
|
||||
and loop =
|
||||
While of while_loop
|
||||
While of while_loop reg
|
||||
| For of for_loop
|
||||
|
||||
and while_loop = (kwd_while * expr * block reg) reg
|
||||
and while_loop = {
|
||||
kwd_while : kwd_while;
|
||||
cond : expr;
|
||||
block : block reg
|
||||
}
|
||||
|
||||
and for_loop =
|
||||
ForInt of for_int reg
|
||||
@ -320,7 +401,7 @@ and for_loop =
|
||||
|
||||
and for_int = {
|
||||
kwd_for : kwd_for;
|
||||
ass : ass_instr;
|
||||
assign : var_assign reg;
|
||||
down : kwd_down option;
|
||||
kwd_to : kwd_to;
|
||||
bound : expr;
|
||||
@ -328,6 +409,12 @@ and for_int = {
|
||||
block : block reg
|
||||
}
|
||||
|
||||
and var_assign = {
|
||||
name : variable;
|
||||
assign : assign;
|
||||
expr : expr
|
||||
}
|
||||
|
||||
and for_collect = {
|
||||
kwd_for : kwd_for;
|
||||
var : variable;
|
||||
@ -340,98 +427,177 @@ and for_collect = {
|
||||
(* Expressions *)
|
||||
|
||||
and expr =
|
||||
Or of (expr * bool_or * expr) reg
|
||||
| And of (expr * bool_and * expr) reg
|
||||
| Lt of (expr * lt * expr) reg
|
||||
| Leq of (expr * leq * expr) reg
|
||||
| Gt of (expr * gt * expr) reg
|
||||
| Geq of (expr * geq * expr) reg
|
||||
| Equal of (expr * equal * expr) reg
|
||||
| Neq of (expr * neq * expr) reg
|
||||
| Cat of (expr * cat * expr) reg
|
||||
| Cons of (expr * cons * expr) reg
|
||||
| Add of (expr * plus * expr) reg
|
||||
| Sub of (expr * minus * expr) reg
|
||||
| Mult of (expr * times * expr) reg
|
||||
| Div of (expr * slash * expr) reg
|
||||
| Mod of (expr * kwd_mod * expr) reg
|
||||
| Neg of (minus * expr) reg
|
||||
| Not of (kwd_not * expr) reg
|
||||
| Int of (Lexer.lexeme * Z.t) reg
|
||||
| Var of Lexer.lexeme reg
|
||||
| String of Lexer.lexeme reg
|
||||
| Bytes of (Lexer.lexeme * MBytes.t) reg
|
||||
ELogic of logic_expr
|
||||
| EArith of arith_expr
|
||||
| EString of string_expr
|
||||
| EList of list_expr
|
||||
| ESet of set_expr
|
||||
| EConstr of constr_expr
|
||||
| ERecord of record_expr
|
||||
| EMap of map_expr
|
||||
| EVar of Lexer.lexeme reg
|
||||
| ECall of fun_call
|
||||
| EBytes of (Lexer.lexeme * Hex.t) reg
|
||||
| EUnit of c_Unit
|
||||
| ETuple of tuple
|
||||
| EPar of expr par reg
|
||||
|
||||
and map_expr =
|
||||
MapLookUp of map_lookup reg
|
||||
| MapInj of map_injection reg
|
||||
|
||||
and map_lookup = {
|
||||
path : path;
|
||||
index : expr brackets reg
|
||||
}
|
||||
|
||||
and path =
|
||||
Name of variable
|
||||
| RecordPath of record_projection reg
|
||||
|
||||
and logic_expr =
|
||||
BoolExpr of bool_expr
|
||||
| CompExpr of comp_expr
|
||||
|
||||
and bool_expr =
|
||||
Or of bool_or bin_op reg
|
||||
| And of bool_and bin_op reg
|
||||
| Not of kwd_not un_op reg
|
||||
| False of c_False
|
||||
| True of c_True
|
||||
| Unit of c_Unit
|
||||
| Tuple of tuple
|
||||
| List of (expr, comma) nsepseq brackets
|
||||
| EmptyList of empty_list
|
||||
| Set of (expr, comma) nsepseq braces
|
||||
| EmptySet of empty_set
|
||||
| NoneExpr of none_expr
|
||||
| FunCall of fun_call
|
||||
| ConstrApp of constr_app
|
||||
| SomeApp of (c_Some * arguments) reg
|
||||
| MapLookUp of map_lookup reg
|
||||
| ParExpr of expr par
|
||||
|
||||
and tuple = (expr, comma) nsepseq par
|
||||
and 'a bin_op = {
|
||||
op : 'a;
|
||||
arg1 : expr;
|
||||
arg2 : expr
|
||||
}
|
||||
|
||||
and empty_list =
|
||||
(lbracket * rbracket * colon * type_expr) par
|
||||
and 'a un_op = {
|
||||
op : 'a;
|
||||
arg : expr
|
||||
}
|
||||
|
||||
and empty_set =
|
||||
(lbrace * rbrace * colon * type_expr) par
|
||||
and comp_expr =
|
||||
Lt of lt bin_op reg
|
||||
| Leq of leq bin_op reg
|
||||
| Gt of gt bin_op reg
|
||||
| Geq of geq bin_op reg
|
||||
| Equal of equal bin_op reg
|
||||
| Neq of neq bin_op reg
|
||||
|
||||
and none_expr =
|
||||
(c_None * colon * type_expr) par
|
||||
and arith_expr =
|
||||
Add of plus bin_op reg
|
||||
| Sub of minus bin_op reg
|
||||
| Mult of times bin_op reg
|
||||
| Div of slash bin_op reg
|
||||
| Mod of kwd_mod bin_op reg
|
||||
| Neg of minus un_op reg
|
||||
| Int of (Lexer.lexeme * Z.t) reg
|
||||
|
||||
and string_expr =
|
||||
Cat of cat bin_op reg
|
||||
| String of Lexer.lexeme reg
|
||||
|
||||
and list_expr =
|
||||
Cons of cons bin_op reg
|
||||
| List of (expr, comma) nsepseq brackets reg
|
||||
| EmptyList of empty_list reg
|
||||
|
||||
and set_expr =
|
||||
Set of (expr, comma) nsepseq braces reg
|
||||
| EmptySet of empty_set reg
|
||||
|
||||
and constr_expr =
|
||||
SomeApp of (c_Some * arguments) reg
|
||||
| NoneExpr of none_expr reg
|
||||
| ConstrApp of (constr * arguments) reg
|
||||
|
||||
and record_expr =
|
||||
RecordInj of record_injection reg
|
||||
| RecordProj of record_projection reg
|
||||
|
||||
and record_injection = {
|
||||
opening : kwd_record;
|
||||
fields : (field_assign reg, semi) nsepseq;
|
||||
terminator : semi option;
|
||||
close : kwd_end
|
||||
}
|
||||
|
||||
and field_assign = {
|
||||
field_name : field_name;
|
||||
equal : equal;
|
||||
field_expr : expr
|
||||
}
|
||||
|
||||
and record_projection = {
|
||||
record_name : variable;
|
||||
selector : dot;
|
||||
field_path : (field_name, dot) nsepseq
|
||||
}
|
||||
|
||||
and tuple = (expr, comma) nsepseq par reg
|
||||
|
||||
and empty_list = typed_empty_list par
|
||||
|
||||
and typed_empty_list = {
|
||||
lbracket : lbracket;
|
||||
rbracket : rbracket;
|
||||
colon : colon;
|
||||
list_type : type_expr
|
||||
}
|
||||
|
||||
and empty_set = typed_empty_set par
|
||||
|
||||
and typed_empty_set = {
|
||||
lbrace : lbrace;
|
||||
rbrace : rbrace;
|
||||
colon : colon;
|
||||
set_type : type_expr
|
||||
}
|
||||
|
||||
and none_expr = typed_none_expr par
|
||||
|
||||
and typed_none_expr = {
|
||||
c_None : c_None;
|
||||
colon : colon;
|
||||
opt_type : type_expr
|
||||
}
|
||||
|
||||
and fun_call = (fun_name * arguments) reg
|
||||
|
||||
and arguments = tuple
|
||||
|
||||
and constr_app = (constr * arguments) reg
|
||||
|
||||
and map_lookup = {
|
||||
map_name : variable;
|
||||
selector : dot;
|
||||
index : expr brackets
|
||||
}
|
||||
|
||||
(* Patterns *)
|
||||
|
||||
and pattern = (core_pattern, cons) nsepseq reg
|
||||
|
||||
and core_pattern =
|
||||
PVar of Lexer.lexeme reg
|
||||
and pattern =
|
||||
PCons of (pattern, cons) nsepseq reg
|
||||
| PVar of Lexer.lexeme reg
|
||||
| PWild of wild
|
||||
| PInt of (Lexer.lexeme * Z.t) reg
|
||||
| PBytes of (Lexer.lexeme * MBytes.t) reg
|
||||
| PBytes of (Lexer.lexeme * Hex.t) reg
|
||||
| PString of Lexer.lexeme reg
|
||||
| PUnit of c_Unit
|
||||
| PFalse of c_False
|
||||
| PTrue of c_True
|
||||
| PNone of c_None
|
||||
| PSome of (c_Some * core_pattern par) reg
|
||||
| PSome of (c_Some * pattern par reg) reg
|
||||
| PList of list_pattern
|
||||
| PTuple of (core_pattern, comma) nsepseq par
|
||||
| PTuple of (pattern, comma) nsepseq par reg
|
||||
|
||||
and list_pattern =
|
||||
Sugar of (core_pattern, comma) sepseq brackets
|
||||
| Raw of (core_pattern * cons * pattern) par
|
||||
Sugar of (pattern, comma) sepseq brackets reg
|
||||
| Raw of (pattern * cons * pattern) par reg
|
||||
|
||||
(* Projecting regions *)
|
||||
|
||||
val type_expr_to_region : type_expr -> Region.t
|
||||
|
||||
val expr_to_region : expr -> Region.t
|
||||
|
||||
val instr_to_region : instruction -> Region.t
|
||||
|
||||
val core_pattern_to_region : core_pattern -> Region.t
|
||||
|
||||
val pattern_to_region : pattern -> Region.t
|
||||
val local_decl_to_region : local_decl -> Region.t
|
||||
val path_to_region : path -> Region.t
|
||||
val lhs_to_region : lhs -> Region.t
|
||||
val rhs_to_region : rhs -> Region.t
|
||||
|
||||
(* Printing *)
|
||||
|
||||
|
@ -18,7 +18,7 @@ module O = struct
|
||||
PVar of var_name
|
||||
| PWild
|
||||
| PInt of Z.t
|
||||
| PBytes of MBytes.t
|
||||
| PBytes of Hex.t
|
||||
| PString of string
|
||||
| PUnit
|
||||
| PFalse
|
||||
@ -42,6 +42,7 @@ module O = struct
|
||||
| Function of { arg: type_expr; ret: type_expr }
|
||||
| Ref of type_expr
|
||||
| String
|
||||
| Bytes
|
||||
| Int
|
||||
| Unit
|
||||
| Bool
|
||||
@ -80,7 +81,7 @@ module O = struct
|
||||
|
||||
and constant =
|
||||
Unit
|
||||
| Int of Z.t | String of string | Bytes of MBytes.t
|
||||
| Int of Z.t | String of string | Bytes of Hex.t
|
||||
| False | True
|
||||
| Null of type_expr
|
||||
| EmptySet of type_expr
|
||||
@ -653,7 +654,7 @@ let s_ast (ast : I.ast) : O.ast =
|
||||
(* and s_bytes {region; value = lexeme, abstract} = *)
|
||||
(* printf "%s: Bytes (\"%s\", \"0x%s\")\n" *)
|
||||
(* (compact region) lexeme *)
|
||||
(* (MBytes.to_hex abstract |> Hex.to_string) *)
|
||||
(* (Hex.to_string abstract) *)
|
||||
|
||||
(* and s_int {region; value = lexeme, abstract} = *)
|
||||
(* printf "%s: Int (\"%s\", %s)\n" *)
|
||||
|
@ -1,4 +1,4 @@
|
||||
(* Parsing the command-line option for testing the Ligo lexer and
|
||||
(* Parsing the command-line option for testing the LIGO lexer and
|
||||
parser *)
|
||||
|
||||
let printf = Printf.printf
|
||||
@ -11,16 +11,17 @@ let abort msg =
|
||||
|
||||
let help () =
|
||||
let file = Filename.basename Sys.argv.(0) in
|
||||
printf "Usage: %s [<option> ...] [<input>.li | \"-\"]\n" file;
|
||||
print_endline "where <input>.li is the Ligo source file (default: stdin),";
|
||||
printf "Usage: %s [<option> ...] [<input>.ligo | \"-\"]\n" file;
|
||||
print_endline "where <input>.ligo is the LIGO source file (default: stdin),";
|
||||
print_endline "and each <option> (if any) is one of the following:";
|
||||
print_endline " -I <paths> Library paths (colon-separated)";
|
||||
print_endline " -c, --copy Print lexemes of tokens and markup (lexer)";
|
||||
print_endline " -t, --tokens Print tokens (lexer)";
|
||||
print_endline " -u, --units Print tokens and markup (lexer)";
|
||||
print_endline " -q, --quiet No output, except errors (default)";
|
||||
print_endline " --columns Columns for source locations";
|
||||
print_endline " --bytes Bytes for source locations";
|
||||
print_endline " --verbose=<stages> cmdline, ast";
|
||||
print_endline " --verbose=<stages> cmdline, cpp, ast (colon-separated)";
|
||||
print_endline " --version Commit hash on stdout";
|
||||
print_endline " -h, --help This help";
|
||||
exit 0
|
||||
@ -39,9 +40,12 @@ and columns = ref false
|
||||
and bytes = ref false
|
||||
and verbose = ref Utils.String.Set.empty
|
||||
and input = ref None
|
||||
and libs = ref []
|
||||
|
||||
let split_at_colon = Str.(split (regexp ":"))
|
||||
|
||||
let add_path p = libs := !libs @ split_at_colon p
|
||||
|
||||
let add_verbose d =
|
||||
verbose := List.fold_left (Utils.swap Utils.String.Set.add)
|
||||
!verbose
|
||||
@ -49,6 +53,7 @@ let add_verbose d =
|
||||
|
||||
let specs =
|
||||
let open! Getopt in [
|
||||
'I', nolong, None, Some add_path;
|
||||
'c', "copy", set copy true, None;
|
||||
't', "tokens", set tokens true, None;
|
||||
'u', "units", set units true, None;
|
||||
@ -92,6 +97,10 @@ let string_of convert = function
|
||||
None -> "None"
|
||||
| Some s -> sprintf "Some %s" (convert s)
|
||||
|
||||
let string_of_path p =
|
||||
let apply s a = if a = "" then s else s ^ ":" ^ a
|
||||
in List.fold_right apply p ""
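(* e.g. (illustrative) string_of_path ["lib1"; "lib2"] = "lib1:lib2" *)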
let quote s = sprintf "\"%s\"" s
|
||||
|
||||
let verbose_str =
|
||||
@ -108,7 +117,8 @@ let print_opt () =
|
||||
printf "columns = %b\n" !columns;
|
||||
printf "bytes = %b\n" !bytes;
|
||||
printf "verbose = \"%s\"\n" verbose_str;
|
||||
printf "input = %s\n" (string_of quote !input)
|
||||
printf "input = %s\n" (string_of quote !input);
|
||||
printf "libs = %s\n" (string_of_path !libs)
|
||||
;;
|
||||
|
||||
if Utils.String.Set.mem "cmdline" !verbose then print_opt ();;
|
||||
@ -117,11 +127,11 @@ let input =
|
||||
match !input with
|
||||
None | Some "-" -> !input
|
||||
| Some file_path ->
|
||||
if Filename.check_suffix file_path ".li"
|
||||
if Filename.check_suffix file_path ".ligo"
|
||||
then if Sys.file_exists file_path
|
||||
then Some file_path
|
||||
else abort "Source file not found."
|
||||
else abort "Source file lacks the extension .li."
|
||||
else abort "Source file lacks the extension .ligo."
|
||||
|
||||
(* Exporting remaining options as non-mutable values *)
|
||||
|
||||
@ -132,6 +142,7 @@ and quiet = !quiet
|
||||
and offsets = not !columns
|
||||
and mode = if !bytes then `Byte else `Point
|
||||
and verbose = !verbose
|
||||
and libs = !libs
|
||||
;;
|
||||
|
||||
if Utils.String.Set.mem "cmdline" verbose then
|
||||
@ -144,6 +155,7 @@ if Utils.String.Set.mem "cmdline" verbose then
|
||||
printf "offsets = %b\n" offsets;
|
||||
printf "mode = %s\n" (if mode = `Byte then "`Byte" else "`Point");
|
||||
printf "verbose = \"%s\"\n" verbose_str;
|
||||
printf "input = %s\n" (string_of quote input)
|
||||
printf "input = %s\n" (string_of quote input);
|
||||
printf "I = %s\n" (string_of_path libs)
|
||||
end
|
||||
;;
|
||||
|
@ -1,4 +1,4 @@
|
||||
(* Parsing the command-line option for testing the Ligo lexer and
|
||||
(* Parsing the command-line option for testing the LIGO lexer and
|
||||
parser *)
|
||||
|
||||
(* If the value [offsets] is [true], then the user requested that
|
||||
@ -25,6 +25,10 @@ val verbose : Utils.String.Set.t
|
||||
|
||||
val input : string option
|
||||
|
||||
(* Paths where to find LIGO files for inclusion *)
|
||||
|
||||
val libs : string list
|
||||
|
||||
(* If the value [cmd] is
|
||||
* [Quiet], then no output from the lexer and parser should be
|
||||
expected, safe error messages: this is the default value;
|
||||
|
@ -1,4 +1,4 @@
|
||||
(* This signature defines the lexical tokens for Ligo
|
||||
(* This signature defines the lexical tokens for LIGO
|
||||
|
||||
_Tokens_ are the abstract units which are used by the parser to
|
||||
build the abstract syntax tree (AST), in other words, the stream of
|
||||
@ -29,7 +29,7 @@ type t =
|
||||
(* Literals *)
|
||||
|
||||
String of lexeme Region.reg
|
||||
| Bytes of (lexeme * MBytes.t) Region.reg
|
||||
| Bytes of (lexeme * Hex.t) Region.reg
|
||||
| Int of (lexeme * Z.t) Region.reg
|
||||
| Ident of lexeme Region.reg
|
||||
| Constr of lexeme Region.reg
|
||||
@ -50,8 +50,6 @@ type t =
|
||||
| ASS of Region.t (* ":=" *)
|
||||
| EQUAL of Region.t (* "=" *)
|
||||
| COLON of Region.t (* ":" *)
|
||||
| OR of Region.t (* "||" *)
|
||||
| AND of Region.t (* "&&" *)
|
||||
| LT of Region.t (* "<" *)
|
||||
| LEQ of Region.t (* "<=" *)
|
||||
| GT of Region.t (* ">" *)
|
||||
@ -67,32 +65,35 @@ type t =
|
||||
|
||||
(* Keywords *)
|
||||
|
||||
| And of Region.t (* "and" *)
|
||||
| Begin of Region.t (* "begin" *)
|
||||
| Case of Region.t (* "case" *)
|
||||
| Const of Region.t (* "const" *)
|
||||
| Down of Region.t (* "down" *)
|
||||
| Else of Region.t (* "else" *)
|
||||
| End of Region.t (* "end" *)
|
||||
| Entrypoint of Region.t (* "entrypoint" *)
|
||||
| Fail of Region.t (* "fail" *)
|
||||
| For of Region.t (* "for" *)
|
||||
| Function of Region.t (* "function" *)
|
||||
| If of Region.t (* "if" *)
|
||||
| In of Region.t (* "in" *)
|
||||
| Is of Region.t (* "is" *)
|
||||
| Entrypoint of Region.t (* "entrypoint" *)
|
||||
| For of Region.t (* "for" *)
|
||||
| Function of Region.t (* "function" *)
|
||||
| Storage of Region.t (* "storage" *)
|
||||
| Type of Region.t (* "type" *)
|
||||
| Of of Region.t (* "of" *)
|
||||
| Operations of Region.t (* "operations" *)
|
||||
| Var of Region.t (* "var" *)
|
||||
| End of Region.t (* "end" *)
|
||||
| Then of Region.t (* "then" *)
|
||||
| Else of Region.t (* "else" *)
|
||||
| Match of Region.t (* "match" *)
|
||||
| Null of Region.t (* "null" *)
|
||||
| Procedure of Region.t (* "procedure" *)
|
||||
| Record of Region.t (* "record" *)
|
||||
| Step of Region.t (* "step" *)
|
||||
| To of Region.t (* "to" *)
|
||||
| Map of Region.t (* "map" *)
|
||||
| Mod of Region.t (* "mod" *)
|
||||
| Not of Region.t (* "not" *)
|
||||
| Of of Region.t (* "of" *)
|
||||
| Or of Region.t (* "or" *)
|
||||
| Patch of Region.t (* "patch" *)
|
||||
| Procedure of Region.t (* "procedure" *)
|
||||
| Record of Region.t (* "record" *)
|
||||
| Skip of Region.t (* "skip" *)
|
||||
| Step of Region.t (* "step" *)
|
||||
| Storage of Region.t (* "storage" *)
|
||||
| Then of Region.t (* "then" *)
|
||||
| To of Region.t (* "to" *)
|
||||
| Type of Region.t (* "type" *)
|
||||
| Var of Region.t (* "var" *)
|
||||
| While of Region.t (* "while" *)
|
||||
| With of Region.t (* "with" *)
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
(* Lexer specification for Ligo, to be processed by [ocamllex] *)
|
||||
(* Lexer specification for LIGO, to be processed by [ocamllex] *)
|
||||
|
||||
{
|
||||
(* START HEADER *)
|
||||
@ -28,7 +28,7 @@ type t =
|
||||
(* Literals *)
|
||||
|
||||
String of lexeme Region.reg
|
||||
| Bytes of (lexeme * MBytes.t) Region.reg
|
||||
| Bytes of (lexeme * Hex.t) Region.reg
|
||||
| Int of (lexeme * Z.t) Region.reg
|
||||
| Ident of lexeme Region.reg
|
||||
| Constr of lexeme Region.reg
|
||||
@ -49,8 +49,6 @@ type t =
|
||||
| ASS of Region.t
|
||||
| EQUAL of Region.t
|
||||
| COLON of Region.t
|
||||
| OR of Region.t
|
||||
| AND of Region.t
|
||||
| LT of Region.t
|
||||
| LEQ of Region.t
|
||||
| GT of Region.t
|
||||
@ -66,34 +64,37 @@ type t =
|
||||
|
||||
(* Keywords *)
|
||||
|
||||
| Begin of Region.t
|
||||
| Const of Region.t
|
||||
| Down of Region.t
|
||||
| Fail of Region.t
|
||||
| If of Region.t
|
||||
| In of Region.t
|
||||
| Is of Region.t
|
||||
| Entrypoint of Region.t
|
||||
| For of Region.t
|
||||
| Function of Region.t
|
||||
| Storage of Region.t
|
||||
| Type of Region.t
|
||||
| Of of Region.t
|
||||
| Operations of Region.t
|
||||
| Var of Region.t
|
||||
| End of Region.t
|
||||
| Then of Region.t
|
||||
| Else of Region.t
|
||||
| Match of Region.t
|
||||
| Null of Region.t
|
||||
| Procedure of Region.t
|
||||
| Record of Region.t
|
||||
| Step of Region.t
|
||||
| To of Region.t
|
||||
| Mod of Region.t
|
||||
| Not of Region.t
|
||||
| While of Region.t
|
||||
| With of Region.t
|
||||
| And of Region.t (* "and" *)
|
||||
| Begin of Region.t (* "begin" *)
|
||||
| Case of Region.t (* "case" *)
|
||||
| Const of Region.t (* "const" *)
|
||||
| Down of Region.t (* "down" *)
|
||||
| Else of Region.t (* "else" *)
|
||||
| End of Region.t (* "end" *)
|
||||
| Entrypoint of Region.t (* "entrypoint" *)
|
||||
| Fail of Region.t (* "fail" *)
|
||||
| For of Region.t (* "for" *)
|
||||
| Function of Region.t (* "function" *)
|
||||
| If of Region.t (* "if" *)
|
||||
| In of Region.t (* "in" *)
|
||||
| Is of Region.t (* "is" *)
|
||||
| Map of Region.t (* "map" *)
|
||||
| Mod of Region.t (* "mod" *)
|
||||
| Not of Region.t (* "not" *)
|
||||
| Of of Region.t (* "of" *)
|
||||
| Or of Region.t (* "or" *)
|
||||
| Patch of Region.t (* "patch" *)
|
||||
| Procedure of Region.t (* "procedure" *)
|
||||
| Record of Region.t (* "record" *)
|
||||
| Skip of Region.t (* "skip" *)
|
||||
| Step of Region.t (* "step" *)
|
||||
| Storage of Region.t (* "storage" *)
|
||||
| Then of Region.t (* "then" *)
|
||||
| To of Region.t (* "to" *)
|
||||
| Type of Region.t (* "type" *)
|
||||
| Var of Region.t (* "var" *)
|
||||
| While of Region.t (* "while" *)
|
||||
| With of Region.t (* "with" *)
|
||||
|
||||
(* Types *)
|
||||
(*
|
||||
@ -141,7 +142,7 @@ let proj_token = function
|
||||
| Bytes Region.{region; value = s,b} ->
|
||||
region,
|
||||
sprintf "Bytes (\"%s\", \"0x%s\")"
|
||||
s (MBytes.to_hex b |> Hex.to_string)
|
||||
s (Hex.to_string b)
|
||||
|
||||
| Int Region.{region; value = s,n} ->
|
||||
region, sprintf "Int (\"%s\", %s)" s (Z.to_string n)
|
||||
@ -168,8 +169,6 @@ let proj_token = function
|
||||
| ASS region -> region, "ASS"
|
||||
| EQUAL region -> region, "EQUAL"
|
||||
| COLON region -> region, "COLON"
|
||||
| OR region -> region, "OR"
|
||||
| AND region -> region, "AND"
|
||||
| LT region -> region, "LT"
|
||||
| LEQ region -> region, "LEQ"
|
||||
| GT region -> region, "GT"
|
||||
@ -185,32 +184,35 @@ let proj_token = function
|
||||
|
||||
(* Keywords *)
|
||||
|
||||
| And region -> region, "And"
|
||||
| Begin region -> region, "Begin"
|
||||
| Case region -> region, "Case"
|
||||
| Const region -> region, "Const"
|
||||
| Down region -> region, "Down"
|
||||
| Else region -> region, "Else"
|
||||
| End region -> region, "End"
|
||||
| Entrypoint region -> region, "Entrypoint"
|
||||
| Fail region -> region, "Fail"
|
||||
| For region -> region, "For"
|
||||
| Function region -> region, "Function"
|
||||
| If region -> region, "If"
|
||||
| In region -> region, "In"
|
||||
| Is region -> region, "Is"
|
||||
| Entrypoint region -> region, "Entrypoint"
|
||||
| For region -> region, "For"
|
||||
| Function region -> region, "Function"
|
||||
| Storage region -> region, "Storage"
|
||||
| Type region -> region, "Type"
|
||||
| Of region -> region, "Of"
|
||||
| Operations region -> region, "Operations"
|
||||
| Var region -> region, "Var"
|
||||
| End region -> region, "End"
|
||||
| Then region -> region, "Then"
|
||||
| Else region -> region, "Else"
|
||||
| Match region -> region, "Match"
|
||||
| Null region -> region, "Null"
|
||||
| Procedure region -> region, "Procedure"
|
||||
| Record region -> region, "Record"
|
||||
| Step region -> region, "Step"
|
||||
| To region -> region, "To"
|
||||
| Map region -> region, "Map"
|
||||
| Mod region -> region, "Mod"
|
||||
| Not region -> region, "Not"
|
||||
| Of region -> region, "Of"
|
||||
| Or region -> region, "Or"
|
||||
| Patch region -> region, "Patch"
|
||||
| Procedure region -> region, "Procedure"
|
||||
| Record region -> region, "Record"
|
||||
| Skip region -> region, "Skip"
|
||||
| Step region -> region, "Step"
|
||||
| Storage region -> region, "Storage"
|
||||
| Then region -> region, "Then"
|
||||
| To region -> region, "To"
|
||||
| Type region -> region, "Type"
|
||||
| Var region -> region, "Var"
|
||||
| While region -> region, "While"
|
||||
| With region -> region, "With"
|
||||
|
||||
@ -252,8 +254,6 @@ let to_lexeme = function
|
||||
| ASS _ -> ":="
|
||||
| EQUAL _ -> "="
|
||||
| COLON _ -> ":"
|
||||
| OR _ -> "||"
|
||||
| AND _ -> "&&"
|
||||
| LT _ -> "<"
|
||||
| LEQ _ -> "<="
|
||||
| GT _ -> ">"
|
||||
@ -269,7 +269,9 @@ let to_lexeme = function
|
||||
|
||||
(* Keywords *)
|
||||
|
||||
| And _ -> "and"
|
||||
| Begin _ -> "begin"
|
||||
| Case _ -> "case"
|
||||
| Const _ -> "const"
|
||||
| Down _ -> "down"
|
||||
| Fail _ -> "fail"
|
||||
@ -279,19 +281,20 @@ let to_lexeme = function
|
||||
| Entrypoint _ -> "entrypoint"
|
||||
| For _ -> "for"
|
||||
| Function _ -> "function"
|
||||
| Storage _ -> "storage"
|
||||
| Type _ -> "type"
|
||||
| Of _ -> "of"
|
||||
| Operations _ -> "operations"
|
||||
| Or _ -> "or"
|
||||
| Var _ -> "var"
|
||||
| End _ -> "end"
|
||||
| Then _ -> "then"
|
||||
| Else _ -> "else"
|
||||
| Match _ -> "match"
|
||||
| Null _ -> "null"
|
||||
| Map _ -> "map"
|
||||
| Patch _ -> "patch"
|
||||
| Procedure _ -> "procedure"
|
||||
| Record _ -> "record"
|
||||
| Skip _ -> "skip"
|
||||
| Step _ -> "step"
|
||||
| Storage _ -> "storage"
|
||||
| To _ -> "to"
|
||||
| Mod _ -> "mod"
|
||||
| Not _ -> "not"
|
||||
@ -321,7 +324,9 @@ let to_region token = proj_token token |> fst
|
||||
(* LEXIS *)
|
||||
|
||||
let keywords = [
|
||||
(fun reg -> And reg);
|
||||
(fun reg -> Begin reg);
|
||||
(fun reg -> Case reg);
|
||||
(fun reg -> Const reg);
|
||||
(fun reg -> Down reg);
|
||||
(fun reg -> Fail reg);
|
||||
@ -331,19 +336,20 @@ let keywords = [
|
||||
(fun reg -> Entrypoint reg);
|
||||
(fun reg -> For reg);
|
||||
(fun reg -> Function reg);
|
||||
(fun reg -> Storage reg);
|
||||
(fun reg -> Type reg);
|
||||
(fun reg -> Of reg);
|
||||
(fun reg -> Operations reg);
|
||||
(fun reg -> Or reg);
|
||||
(fun reg -> Var reg);
|
||||
(fun reg -> End reg);
|
||||
(fun reg -> Then reg);
|
||||
(fun reg -> Else reg);
|
||||
(fun reg -> Match reg);
|
||||
(fun reg -> Null reg);
|
||||
(fun reg -> Map reg);
|
||||
(fun reg -> Patch reg);
|
||||
(fun reg -> Procedure reg);
|
||||
(fun reg -> Record reg);
|
||||
(fun reg -> Skip reg);
|
||||
(fun reg -> Step reg);
|
||||
(fun reg -> Storage reg);
|
||||
(fun reg -> To reg);
|
||||
(fun reg -> Mod reg);
|
||||
(fun reg -> Not reg);
|
||||
@ -353,8 +359,7 @@ let keywords = [
|
||||
|
||||
let reserved =
|
||||
let open SSet in
|
||||
empty |> add "and"
|
||||
|> add "as"
|
||||
empty |> add "as"
|
||||
|> add "asr"
|
||||
|> add "assert"
|
||||
|> add "class"
|
||||
@ -384,7 +389,6 @@ let reserved =
|
||||
|> add "nonrec"
|
||||
|> add "object"
|
||||
|> add "open"
|
||||
|> add "or"
|
||||
|> add "private"
|
||||
|> add "rec"
|
||||
|> add "sig"
|
||||
@ -466,7 +470,7 @@ let mk_string lexeme region = String Region.{region; value=lexeme}
|
||||
|
||||
let mk_bytes lexeme region =
|
||||
let norm = Str.(global_replace (regexp "_") "" lexeme) in
|
||||
let value = lexeme, MBytes.of_hex (Hex.of_string norm)
|
||||
let value = lexeme, Hex.of_string norm
|
||||
in Bytes Region.{region; value}
|
||||
|
||||
type int_err = Non_canonical_zero
|
||||
@ -496,8 +500,6 @@ let mk_sym lexeme region =
|
||||
| ":=" -> ASS region
|
||||
| "=" -> EQUAL region
|
||||
| ":" -> COLON region
|
||||
| "||" -> OR region
|
||||
| "&&" -> AND region
|
||||
| "<" -> LT region
|
||||
| "<=" -> LEQ region
|
||||
| ">" -> GT region
|
||||
@ -545,7 +547,9 @@ let is_ident = function
|
||||
| _ -> false
|
||||
|
||||
let is_kwd = function
|
||||
And _
|
||||
| Begin _
|
||||
| Case _
|
||||
| Const _
|
||||
| Down _
|
||||
| Fail _
|
||||
@ -555,19 +559,20 @@ let is_kwd = function
|
||||
| Entrypoint _
|
||||
| For _
|
||||
| Function _
|
||||
| Storage _
|
||||
| Type _
|
||||
| Of _
|
||||
| Operations _
|
||||
| Or _
|
||||
| Var _
|
||||
| End _
|
||||
| Then _
|
||||
| Else _
|
||||
| Match _
|
||||
| Null _
|
||||
| Map _
|
||||
| Patch _
|
||||
| Procedure _
|
||||
| Record _
|
||||
| Skip _
|
||||
| Step _
|
||||
| Storage _
|
||||
| To _
|
||||
| Mod _
|
||||
| Not _
|
||||
@ -599,8 +604,6 @@ let is_sym = function
|
||||
| ASS _
|
||||
| EQUAL _
|
||||
| COLON _
|
||||
| OR _
|
||||
| AND _
|
||||
| LT _
|
||||
| LEQ _
|
||||
| GT _
|
||||
|
@ -1,16 +1,16 @@
|
||||
(* Lexer specification for Ligo, to be processed by [ocamllex].
|
||||
(* Lexer specification for LIGO, to be processed by [ocamllex].
|
||||
|
||||
The underlying design principles are:
|
||||
|
||||
(1) enforce stylistic constraints at a lexical level, in order to
|
||||
early reject potentially misleading or poorly written
|
||||
Ligo contracts;
|
||||
LIGO contracts;
|
||||
|
||||
(2) provide precise error messages with hints as to how to fix the
|
||||
issue, which is achieved by consulting the lexical
|
||||
right-context of lexemes;
|
||||
|
||||
(3) be as independent as possible from the Ligo version, so
|
||||
(3) be as independent as possible from the LIGO version, so
|
||||
upgrades have as little impact as possible on this
|
||||
specification: this is achieved by using the most general
|
||||
regular expressions to match the lexing buffer and broadly
|
||||
@ -27,7 +27,7 @@
|
||||
be contextualised by the lexer in terms of input source regions, so
|
||||
useful error messages can be printed, therefore they are part of
|
||||
the signature [TOKEN] that parameterise the functor generated
|
||||
here. For instance, if, in a future release of Ligo, new tokens may
|
||||
here. For instance, if, in a future release of LIGO, new tokens may
|
||||
be added, and the recognition of their lexemes may entail new
|
||||
errors, the signature [TOKEN] will have to be augmented and the
|
||||
lexer specification changed. However, it is more likely that
|
||||
|
@ -1,4 +1,4 @@
|
||||
(* Lexer specification for Ligo, to be processed by [ocamllex]. *)
|
||||
(* Lexer specification for LIGO, to be processed by [ocamllex]. *)
|
||||
|
||||
{
|
||||
(* START HEADER *)
|
||||
@ -46,13 +46,30 @@ let reset_file ~file buffer =
|
||||
let open Lexing in
|
||||
buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname = file}
|
||||
|
||||
let reset_line line_num buffer =
|
||||
let reset_line ~line buffer =
|
||||
assert (line >= 0);
|
||||
let open Lexing in
|
||||
buffer.lex_curr_p <- {buffer.lex_curr_p with pos_lnum = line_num}
|
||||
buffer.lex_curr_p <- {buffer.lex_curr_p with pos_lnum = line}
|
||||
|
||||
let reset ~file ?(line=1) buffer =
|
||||
(* Default value per the [Lexing] standard module convention *)
|
||||
reset_file ~file buffer; reset_line line buffer
|
||||
let reset_offset ~offset buffer =
|
||||
assert (offset >= 0);
|
||||
Printf.printf "[reset] offset=%i\n" offset;
|
||||
let open Lexing in
|
||||
let bol = buffer.lex_curr_p.pos_bol in
|
||||
buffer.lex_curr_p <- {buffer.lex_curr_p with pos_cnum = bol (*+ offset*)}
|
||||
|
||||
let reset ?file ?line ?offset buffer =
|
||||
let () =
|
||||
match file with
|
||||
Some file -> reset_file ~file buffer
|
||||
| None -> () in
|
||||
let () =
|
||||
match line with
|
||||
Some line -> reset_line ~line buffer
|
||||
| None -> () in
|
||||
match offset with
|
||||
Some offset -> reset_offset ~offset buffer
|
||||
| None -> ()
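(* Usage sketch (illustrative, not part of the commit): with the
   optional-argument [reset] above, a driver would typically initialise
   positions before scanning, e.g.

     let lexbuf = Lexing.from_channel (open_in "contract.ligo") in
     reset ~file:"contract.ligo" ~line:1 lexbuf

   so that subsequent error positions report the real file name. *)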
(* Rolling back one lexeme _within the current semantic action_ *)
|
||||
|
||||
@ -192,12 +209,12 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
|
||||
|
||||
(* STATE *)
|
||||
|
||||
(* Beyond tokens, the result of lexing is a state (a so-called
|
||||
_state monad_). The type [state] represents the logical state
|
||||
of the lexing engine, that is, a value which is threaded during
|
||||
scanning and which denotes useful, high-level information
|
||||
beyond what the type [Lexing.lexbuf] in the standard library
|
||||
already provides for all generic lexers.
|
||||
(* Beyond tokens, the result of lexing is a state. The type
|
||||
[state] represents the logical state of the lexing engine, that
|
||||
is, a value which is threaded during scanning and which denotes
|
||||
useful, high-level information beyond what the type
|
||||
[Lexing.lexbuf] in the standard library already provides for
|
||||
all generic lexers.
|
||||
|
||||
Tokens are the smallest units used by the parser to build the
|
||||
abstract syntax tree. The state includes a queue of recognised
|
||||
@ -214,13 +231,13 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
|
||||
lexer.
|
||||
|
||||
The state also includes a field [pos] which holds the current
|
||||
position in the Ligo source file. The position is not always
|
||||
position in the LIGO source file. The position is not always
|
||||
updated after a single character has been matched: that depends
|
||||
on the regular expression that matched the lexing buffer.
|
||||
|
||||
The fields [decoder] and [supply] offer the support needed
|
||||
for the lexing of UTF-8 encoded characters in comments (the
|
||||
only place where they are allowed in Ligo). The former is the
|
||||
only place where they are allowed in LIGO). The former is the
|
||||
decoder proper and the latter is the effectful function
|
||||
[supply] that takes a byte, a start index and a length and feeds
|
||||
it to [decoder]. See the documentation of the third-party
|
||||
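   (A sketch, not taken from this change: with the third-party Uutf
   library the pair described here is typically built as
       let decoder = Uutf.decoder ~encoding:`UTF_8 `Manual
       let supply  = Uutf.Manual.src decoder
   so that [supply] takes a byte buffer, a start index and a length, and
   feeds them to [decoder].)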
@ -427,6 +444,7 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
|
||||
|
||||
let utf8_bom = "\xEF\xBB\xBF" (* Byte Order Mark for UTF-8 *)
|
||||
let nl = ['\n' '\r'] | "\r\n"
|
||||
let blank = ' ' | '\t'
|
||||
let digit = ['0'-'9']
|
||||
let natural = digit | digit (digit | '_')* digit
|
||||
let integer = '-'? natural
|
||||
@ -444,8 +462,9 @@ let esc = "\\n" | "\\\"" | "\\\\" | "\\b"
|
||||
let symbol = ';' | ','
|
||||
| '(' | ')' | '{' | '}' | '[' | ']'
|
||||
| '#' | '|' | "->" | ":=" | '=' | ':'
|
||||
| "||" | "&&" | '<' | "<=" | '>' | ">=" | "=/="
|
||||
| '<' | "<=" | '>' | ">=" | "=/="
|
||||
| '+' | '-' | '*' | '.' | '_' | '^'
|
||||
let string = [^'"' '\\' '\n']* (* For strings of #include *)
|
||||
|
||||
(* RULES *)
|
||||
|
||||
@ -487,6 +506,43 @@ and scan state = parse
|
||||
let state = scan_line thread state lexbuf |> push_line
|
||||
in scan state lexbuf }
|
||||
|
||||
(* Management of #include CPP directives
|
||||
|
||||
An input LIGO program may contain GNU CPP (C preprocessor)
|
||||
directives, and the entry modules (named *Main.ml) run CPP on them
|
||||
in traditional mode:
|
||||
|
||||
https://gcc.gnu.org/onlinedocs/cpp/Traditional-Mode.html
|
||||
|
||||
The main interest in using CPP is that it can stand for a poor
|
||||
man's (flat) module system for LIGO thanks to #include
|
||||
directives, and the traditional mode leaves the markup mostly
|
||||
undisturbed.
|
||||
|
||||
Some of the #line directives resulting from processing #include directives
|
||||
deal with system file headers and thus have to be ignored for our
|
||||
purpose. Moreover, these #line directives may also carry some
|
||||
additional flags:
|
||||
|
||||
https://gcc.gnu.org/onlinedocs/cpp/Preprocessor-Output.html
|
||||
|
||||
of which 1 and 2 indicate, respectively, the start of a new file
|
||||
and the return from a file (after its inclusion has been
|
||||
processed).
|
||||
*)
|
||||
|
||||
| '#' blank* ("line" blank+)? (integer as line) blank+
|
||||
'"' (string as file) '"' {
|
||||
let _, _, state = sync state lexbuf in
|
||||
let flags, state = scan_flags state [] lexbuf in
|
||||
let () = ignore flags in
|
||||
let line = int_of_string line
|
||||
and file = Filename.basename file in
|
||||
let pos = state.pos#set ~file ~line ~offset:0 in
|
||||
let state = {state with pos} in
|
||||
scan state lexbuf
|
||||
}
|
||||
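(* For reference, the two flags of interest could be made explicit by a
   helper such as (hypothetical, not part of this change)

       let interpret_flag = function
         1 -> `Enter_include
       | 2 -> `Return_from_include
       | _ -> `Ignored

   but the semantic action above simply ignores the flags it collects. *)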
|
||||
(* Some special errors
|
||||
|
||||
Some special errors are recognised in the semantic actions of the
|
||||
@ -517,6 +573,18 @@ and scan state = parse
|
||||
| _ as c { let region, _, _ = sync state lexbuf
|
||||
in fail region (Unexpected_character c) }
|
||||
|
||||
(* Scanning CPP #include flags *)
|
||||
|
||||
and scan_flags state acc = parse
|
||||
blank+ { let _, _, state = sync state lexbuf
|
||||
in scan_flags state acc lexbuf }
|
||||
| integer as code { let _, _, state = sync state lexbuf in
|
||||
let acc = int_of_string code :: acc
|
||||
in scan_flags state acc lexbuf }
|
||||
| nl { List.rev acc, push_newline state lexbuf }
|
||||
| eof { let _, _, state = sync state lexbuf
|
||||
in List.rev acc, state (* TODO *) }
|
||||
|
||||
(* Finishing a string *)
|
||||
|
||||
and scan_string thread state = parse
|
||||
|
@ -1,4 +1,4 @@
|
||||
(* Driver for the lexer of Ligo *)
|
||||
(* Driver for the lexer of LIGO *)
|
||||
|
||||
open! EvalOpt (* Reads the command-line options: Effectful! *)
|
||||
|
||||
@ -9,9 +9,47 @@ let () = Printexc.record_backtrace true
|
||||
let external_ text =
|
||||
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
|
||||
|
||||
(* Path for CPP inclusions (#include) *)
|
||||
|
||||
let lib_path =
|
||||
match EvalOpt.libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
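(* For instance, assuming EvalOpt.libs = ["std"; "vendor"], the fold above
   yields " -I std -I vendor": *)

let _lib_path_example =
  let mk_I dir path = Printf.sprintf " -I %s%s" dir path
  in List.fold_right mk_I ["std"; "vendor"] ""  (* = " -I std -I vendor" *)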
|
||||
(* Preprocessing the input source and opening the input channels *)
|
||||
|
||||
let prefix =
|
||||
match EvalOpt.input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp.li"
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" EvalOpt.verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match EvalOpt.input with
|
||||
None | Some "-" ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s - -o %s"
|
||||
lib_path pp_input
|
||||
| Some file ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s %s -o %s"
|
||||
lib_path file pp_input
|
||||
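(* For example, with lib_path = " -I std" and the input file
   "contract.ligo", the command built above reads

       cpp -traditional-cpp -I std contract.ligo -o <pp_input>

   where <pp_input> is the temporary file chosen above. *)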
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" EvalOpt.verbose
|
||||
then Printf.eprintf "%s\n%!" cpp_cmd;
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
|
||||
|
||||
(* Running the lexer on the input file *)
|
||||
|
||||
module Lexer = Lexer.Make (LexToken)
|
||||
|
||||
let () = Lexer.trace ~offsets:EvalOpt.offsets
|
||||
EvalOpt.mode EvalOpt.input EvalOpt.cmd
|
||||
EvalOpt.mode (Some pp_input) EvalOpt.cmd
|
||||
|
@ -1,4 +1,4 @@
|
||||
(* This module defines the sorts of markup recognised by the Ligo
|
||||
(* This module defines the sorts of markup recognised by the LIGO
|
||||
lexer *)
|
||||
|
||||
(* A lexeme is a piece of concrete syntax belonging to a token. In
|
||||
|
@ -6,7 +6,7 @@
|
||||
(* Literals *)
|
||||
|
||||
%token <LexToken.lexeme Region.reg> String
|
||||
%token <(LexToken.lexeme * MBytes.t) Region.reg> Bytes
|
||||
%token <(LexToken.lexeme * Hex.t) Region.reg> Bytes
|
||||
%token <(LexToken.lexeme * Z.t) Region.reg> Int
|
||||
%token <LexToken.lexeme Region.reg> Ident
|
||||
%token <LexToken.lexeme Region.reg> Constr
|
||||
@ -27,8 +27,6 @@
|
||||
%token <Region.t> ASS (* ":=" *)
|
||||
%token <Region.t> EQUAL (* "=" *)
|
||||
%token <Region.t> COLON (* ":" *)
|
||||
%token <Region.t> OR (* "||" *)
|
||||
%token <Region.t> AND (* "&&" *)
|
||||
%token <Region.t> LT (* "<" *)
|
||||
%token <Region.t> LEQ (* "<=" *)
|
||||
%token <Region.t> GT (* ">" *)
|
||||
@ -44,7 +42,9 @@
|
||||
|
||||
(* Keywords *)
|
||||
|
||||
%token <Region.t> And (* "and" *)
|
||||
%token <Region.t> Begin (* "begin" *)
|
||||
%token <Region.t> Case (* "case" *)
|
||||
%token <Region.t> Const (* "const" *)
|
||||
%token <Region.t> Down (* "down" *)
|
||||
%token <Region.t> Fail (* "fail" *)
|
||||
@ -54,19 +54,20 @@
|
||||
%token <Region.t> Entrypoint (* "entrypoint" *)
|
||||
%token <Region.t> For (* "for" *)
|
||||
%token <Region.t> Function (* "function" *)
|
||||
%token <Region.t> Storage (* "storage" *)
|
||||
%token <Region.t> Type (* "type" *)
|
||||
%token <Region.t> Of (* "of" *)
|
||||
%token <Region.t> Operations (* "operations" *)
|
||||
%token <Region.t> Or (* "or" *)
|
||||
%token <Region.t> Var (* "var" *)
|
||||
%token <Region.t> End (* "end" *)
|
||||
%token <Region.t> Then (* "then" *)
|
||||
%token <Region.t> Else (* "else" *)
|
||||
%token <Region.t> Match (* "match" *)
|
||||
%token <Region.t> Null (* "null" *)
|
||||
%token <Region.t> Map (* "map" *)
|
||||
%token <Region.t> Patch (* "patch" *)
|
||||
%token <Region.t> Procedure (* "procedure" *)
|
||||
%token <Region.t> Record (* "record" *)
|
||||
%token <Region.t> Skip (* "skip" *)
|
||||
%token <Region.t> Step (* "step" *)
|
||||
%token <Region.t> Storage (* "storage" *)
|
||||
%token <Region.t> To (* "to" *)
|
||||
%token <Region.t> Mod (* "mod" *)
|
||||
%token <Region.t> Not (* "not" *)
|
||||
|
@ -13,32 +13,72 @@ open AST
|
||||
|
||||
(* Entry points *)
|
||||
|
||||
%start program interactive_expr
|
||||
%type <AST.t> program
|
||||
%start contract interactive_expr
|
||||
%type <AST.t> contract
|
||||
%type <AST.expr> interactive_expr
|
||||
|
||||
%%
|
||||
|
||||
(* RULES *)
|
||||
|
||||
(* The rule [series(Item)] parses a list of [Item] separated by
|
||||
semi-colons and optionally terminated by a semi-colon, then the
|
||||
keyword [End]. *)
|
||||
|
||||
series(Item):
|
||||
Item after_item(Item) { $1,$2 }
|
||||
|
||||
after_item(Item):
|
||||
SEMI item_or_end(Item) {
|
||||
match $2 with
|
||||
`Some (item, items, term, close) ->
|
||||
($1, item)::items, term, close
|
||||
| `End close ->
|
||||
[], Some $1, close
|
||||
}
|
||||
| End {
|
||||
[], None, $1
|
||||
}
|
||||
|
||||
item_or_end(Item):
|
||||
End {
|
||||
`End $1
|
||||
}
|
||||
| series(Item) {
|
||||
let item, (items, term, close) = $1
|
||||
in `Some (item, items, term, close)
|
||||
}
|
||||
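(* Illustration (PascaLIGO-style, not taken from the test suite): with
   [Item] instantiated by [instruction], as in the [block] rule further
   down, [series(Item)] accepts both

       begin x := 1; y := 2 end

   and, with the optional trailing semi-colon,

       begin x := 1; y := 2; end
*)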
|
||||
(* Compound constructs *)
|
||||
|
||||
par(X):
|
||||
LPAR X RPAR {
|
||||
let region = cover $1 $3
|
||||
in {region; value = $1,$2,$3}
|
||||
and value = {
|
||||
lpar = $1;
|
||||
inside = $2;
|
||||
rpar = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
braces(X):
|
||||
LBRACE X RBRACE {
|
||||
let region = cover $1 $3
|
||||
in {region; value = $1,$2,$3}
|
||||
and value = {
|
||||
lbrace = $1;
|
||||
inside = $2;
|
||||
rbrace = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
brackets(X):
|
||||
LBRACKET X RBRACKET {
|
||||
let region = cover $1 $3
|
||||
in {region; value = $1,$2,$3}
|
||||
and value = {
|
||||
lbracket = $1;
|
||||
inside = $2;
|
||||
rbracket = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
(* Sequences
|
||||
@ -85,11 +125,11 @@ sepseq(X,Sep):
|
||||
%inline type_name : Ident { $1 }
|
||||
%inline fun_name : Ident { $1 }
|
||||
%inline field_name : Ident { $1 }
|
||||
%inline map_name : Ident { $1 }
|
||||
%inline record_name : Ident { $1 }
|
||||
|
||||
(* Main *)
|
||||
|
||||
program:
|
||||
contract:
|
||||
nseq(declaration) EOF {
|
||||
{decl = $1; eof = $2}
|
||||
}
|
||||
@ -97,50 +137,16 @@ program:
|
||||
declaration:
|
||||
type_decl { TypeDecl $1 }
|
||||
| const_decl { ConstDecl $1 }
|
||||
| storage_decl { StorageDecl $1 }
|
||||
| operations_decl { OpDecl $1 }
|
||||
| lambda_decl { LambdaDecl $1 }
|
||||
|
||||
storage_decl:
|
||||
Storage var COLON type_expr option(SEMI) {
|
||||
let stop =
|
||||
match $5 with
|
||||
None -> type_expr_to_region $4
|
||||
| Some region -> region in
|
||||
let region = cover $1 stop in
|
||||
let value = {
|
||||
kwd_storage = $1;
|
||||
name = $2;
|
||||
colon = $3;
|
||||
store_type = $4;
|
||||
terminator = $5}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
operations_decl:
|
||||
Operations var COLON type_expr option(SEMI) {
|
||||
let stop =
|
||||
match $5 with
|
||||
None -> type_expr_to_region $4
|
||||
| Some region -> region in
|
||||
let region = cover $1 stop in
|
||||
let value = {
|
||||
kwd_operations = $1;
|
||||
name = $2;
|
||||
colon = $3;
|
||||
op_type = $4;
|
||||
terminator = $5}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
(* Type declarations *)
|
||||
|
||||
type_decl:
|
||||
Type type_name Is type_expr option(SEMI) {
|
||||
let stop =
|
||||
match $5 with
|
||||
None -> type_expr_to_region $4
|
||||
| Some region -> region in
|
||||
Some region -> region
|
||||
| None -> type_expr_to_region $4 in
|
||||
let region = cover $1 stop in
|
||||
let value = {
|
||||
kwd_type = $1;
|
||||
@ -151,9 +157,9 @@ type_decl:
|
||||
in {region; value}}
|
||||
|
||||
type_expr:
|
||||
cartesian { Prod $1 }
|
||||
| sum_type { Sum $1 }
|
||||
| record_type { Record $1 }
|
||||
cartesian { TProd $1 }
|
||||
| sum_type { TSum $1 }
|
||||
| record_type { TRecord $1 }
|
||||
|
||||
cartesian:
|
||||
nsepseq(core_type,TIMES) {
|
||||
@ -167,14 +173,19 @@ core_type:
|
||||
}
|
||||
| type_name type_tuple {
|
||||
let region = cover $1.region $2.region
|
||||
in TypeApp {region; value = $1,$2}
|
||||
in TApp {region; value = $1,$2}
|
||||
}
|
||||
| Map type_tuple {
|
||||
let region = cover $1 $2.region in
|
||||
let value = {value="map"; region=$1}
|
||||
in TApp {region; value = value, $2}
|
||||
}
|
||||
| par(type_expr) {
|
||||
ParType $1
|
||||
TPar $1
|
||||
}
|
||||
|
||||
type_tuple:
|
||||
par(nsepseq(type_name,COMMA)) { $1 }
|
||||
par(nsepseq(type_expr,COMMA)) { $1 }
|
||||
|
||||
sum_type:
|
||||
nsepseq(variant,VBAR) {
|
||||
@ -185,7 +196,8 @@ sum_type:
|
||||
variant:
|
||||
Constr Of cartesian {
|
||||
let region = cover $1.region $3.region
|
||||
in {region; value = $1,$2,$3}
|
||||
and value = {constr = $1; kwd_of = $2; product = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
record_type:
|
||||
@ -194,14 +206,16 @@ record_type:
|
||||
End
|
||||
{
|
||||
let region = cover $1 $3
|
||||
in {region; value = $1,$2,$3}
|
||||
and value = {kwd_record = $1; fields = $2; kwd_end = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
field_decl:
|
||||
field_name COLON type_expr {
|
||||
let stop = type_expr_to_region $3 in
|
||||
let region = cover $1.region stop
|
||||
in {region; value = $1,$2,$3}
|
||||
and value = {field_name = $1; colon = $2; field_type = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
(* Function and procedure declarations *)
|
||||
@ -218,8 +232,8 @@ fun_decl:
|
||||
With expr option(SEMI) {
|
||||
let stop =
|
||||
match $11 with
|
||||
None -> expr_to_region $10
|
||||
| Some region -> region in
|
||||
Some region -> region
|
||||
| None -> expr_to_region $10 in
|
||||
let region = cover $1 stop in
|
||||
let value = {
|
||||
kwd_function = $1;
|
||||
@ -236,6 +250,34 @@ fun_decl:
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
entry_decl:
|
||||
Entrypoint fun_name entry_params COLON type_expr Is
|
||||
seq(local_decl)
|
||||
block
|
||||
With expr option(SEMI) {
|
||||
let stop =
|
||||
match $11 with
|
||||
Some region -> region
|
||||
| None -> expr_to_region $10 in
|
||||
let region = cover $1 stop in
|
||||
let value = {
|
||||
kwd_entrypoint = $1;
|
||||
name = $2;
|
||||
param = $3;
|
||||
colon = $4;
|
||||
ret_type = $5;
|
||||
kwd_is = $6;
|
||||
local_decls = $7;
|
||||
block = $8;
|
||||
kwd_with = $9;
|
||||
return = $10;
|
||||
terminator = $11}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
entry_params:
|
||||
par(nsepseq(entry_param_decl,SEMI)) { $1 }
|
||||
|
||||
proc_decl:
|
||||
Procedure fun_name parameters Is
|
||||
seq(local_decl)
|
||||
@ -243,8 +285,8 @@ proc_decl:
|
||||
{
|
||||
let stop =
|
||||
match $7 with
|
||||
None -> $6.region
|
||||
| Some region -> region in
|
||||
Some region -> region
|
||||
| None -> $6.region in
|
||||
let region = cover $1 stop in
|
||||
let value = {
|
||||
kwd_procedure = $1;
|
||||
@ -257,27 +299,6 @@ proc_decl:
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
entry_decl:
|
||||
Entrypoint fun_name parameters Is
|
||||
seq(local_decl)
|
||||
block option(SEMI)
|
||||
{
|
||||
let stop =
|
||||
match $7 with
|
||||
None -> $6.region
|
||||
| Some region -> region in
|
||||
let region = cover $1 stop in
|
||||
let value = {
|
||||
kwd_entrypoint = $1;
|
||||
name = $2;
|
||||
param = $3;
|
||||
kwd_is = $4;
|
||||
local_decls = $5;
|
||||
block = $6;
|
||||
terminator = $7}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
parameters:
|
||||
par(nsepseq(param_decl,SEMI)) { $1 }
|
||||
|
||||
@ -285,104 +306,199 @@ param_decl:
|
||||
Var var COLON type_expr {
|
||||
let stop = type_expr_to_region $4 in
|
||||
let region = cover $1 stop
|
||||
in ParamVar {region; value = $1,$2,$3,$4}
|
||||
and value = {
|
||||
kwd_var = $1;
|
||||
var = $2;
|
||||
colon = $3;
|
||||
param_type = $4}
|
||||
in ParamVar {region; value}
|
||||
}
|
||||
| Const var COLON type_expr {
|
||||
let stop = type_expr_to_region $4 in
|
||||
let region = cover $1 stop
|
||||
in ParamConst {region; value = $1,$2,$3,$4}
|
||||
and value = {
|
||||
kwd_const = $1;
|
||||
var = $2;
|
||||
colon = $3;
|
||||
param_type = $4}
|
||||
in ParamConst {region; value}
|
||||
}
|
||||
|
||||
entry_param_decl:
|
||||
param_decl {
|
||||
match $1 with
|
||||
ParamConst const -> EntryConst const
|
||||
| ParamVar var -> EntryVar var
|
||||
}
|
||||
| Storage var COLON type_expr {
|
||||
let stop = type_expr_to_region $4 in
|
||||
let region = cover $1 stop
|
||||
and value = {
|
||||
kwd_storage = $1;
|
||||
var = $2;
|
||||
colon = $3;
|
||||
storage_type = $4}
|
||||
in EntryStore {region; value}
|
||||
}
|
||||
|
||||
block:
|
||||
Begin
|
||||
instruction after_instr
|
||||
{
|
||||
let instrs, terminator, close = $3 in
|
||||
let region = cover $1 close in
|
||||
let value = {
|
||||
Begin series(instruction) {
|
||||
let first, (others, terminator, close) = $2 in
|
||||
let region = cover $1 close
|
||||
and value = {
|
||||
opening = $1;
|
||||
instr = (let value = $2, instrs in
|
||||
let region = nsepseq_to_region instr_to_region value
|
||||
in {value; region});
|
||||
instr = first, others;
|
||||
terminator;
|
||||
close}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
after_instr:
|
||||
SEMI instr_or_end {
|
||||
match $2 with
|
||||
`Some (instr, instrs, term, close) ->
|
||||
($1, instr)::instrs, term, close
|
||||
| `End close ->
|
||||
[], Some $1, close
|
||||
}
|
||||
| End {
|
||||
[], None, $1
|
||||
}
|
||||
|
||||
instr_or_end:
|
||||
End {
|
||||
`End $1 }
|
||||
| instruction after_instr {
|
||||
let instrs, term, close = $2 in
|
||||
`Some ($1, instrs, term, close)
|
||||
}
|
||||
|
||||
local_decl:
|
||||
lambda_decl { LocalLam $1 }
|
||||
| const_decl { LocalConst $1 }
|
||||
| var_decl { LocalVar $1 }
|
||||
|
||||
unqualified_decl(OP):
|
||||
var COLON type_expr OP extended_expr option(SEMI) {
|
||||
let stop = match $6 with
|
||||
Some region -> region
|
||||
| None -> $5.region in
|
||||
let init =
|
||||
match $5.value with
|
||||
`Expr e -> e
|
||||
| `EList (lbracket, rbracket) ->
|
||||
let region = $5.region
|
||||
and value = {
|
||||
lbracket;
|
||||
rbracket;
|
||||
colon = Region.ghost;
|
||||
list_type = $3} in
|
||||
let value = {
|
||||
lpar = Region.ghost;
|
||||
inside = value;
|
||||
rpar = Region.ghost} in
|
||||
EList (EmptyList {region; value})
|
||||
| `ENone region ->
|
||||
let value = {
|
||||
lpar = Region.ghost;
|
||||
inside = {
|
||||
c_None = region;
|
||||
colon = Region.ghost;
|
||||
opt_type = $3};
|
||||
rpar = Region.ghost}
|
||||
in EConstr (NoneExpr {region; value})
|
||||
| `EMap inj ->
|
||||
EMap (MapInj inj)
|
||||
in $1, $2, $3, $4, init, $6, stop
|
||||
}
|
||||
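(* [unqualified_decl(OP)] above factors what constant and variable
   declarations have in common: [const_decl] below instantiates [OP] with
   EQUAL ("=") and [var_decl] with ASS (":="). *)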
|
||||
const_decl:
|
||||
Const var COLON type_expr EQUAL expr option(SEMI) {
|
||||
let stop =
|
||||
match $7 with
|
||||
None -> expr_to_region $6
|
||||
| Some region -> region in
|
||||
Const unqualified_decl(EQUAL) {
|
||||
let name, colon, const_type, equal,
|
||||
init, terminator, stop = $2 in
|
||||
let region = cover $1 stop in
|
||||
let value = {
|
||||
kwd_const = $1;
|
||||
name = $2;
|
||||
colon = $3;
|
||||
const_type = $4;
|
||||
equal = $5;
|
||||
init = $6;
|
||||
terminator = $7}
|
||||
name;
|
||||
colon;
|
||||
const_type;
|
||||
equal;
|
||||
init;
|
||||
terminator}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
var_decl:
|
||||
Var var COLON type_expr ASS expr option(SEMI) {
|
||||
let stop =
|
||||
match $7 with
|
||||
None -> expr_to_region $6
|
||||
| Some region -> region in
|
||||
Var unqualified_decl(ASS) {
|
||||
let name, colon, var_type, assign,
|
||||
init, terminator, stop = $2 in
|
||||
let region = cover $1 stop in
|
||||
let value = {
|
||||
kwd_var = $1;
|
||||
name = $2;
|
||||
colon = $3;
|
||||
var_type = $4;
|
||||
ass = $5;
|
||||
init = $6;
|
||||
terminator = $7}
|
||||
name;
|
||||
colon;
|
||||
var_type;
|
||||
assign;
|
||||
init;
|
||||
terminator}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
extended_expr:
|
||||
expr { {region = expr_to_region $1;
|
||||
value = `Expr $1} }
|
||||
| LBRACKET RBRACKET { {region = cover $1 $2;
|
||||
value = `EList ($1,$2)} }
|
||||
| C_None { {region = $1; value = `ENone $1} }
|
||||
| map_injection { {region = $1.region; value = `EMap $1} }
|
||||
|
||||
|
||||
instruction:
|
||||
single_instr { Single $1 }
|
||||
| block { Block $1 }
|
||||
|
||||
single_instr:
|
||||
conditional { Cond $1 }
|
||||
| match_instr { Match $1 }
|
||||
| ass { Ass $1 }
|
||||
| case_instr { Case $1 }
|
||||
| assignment { Assign $1 }
|
||||
| loop { Loop $1 }
|
||||
| proc_call { ProcCall $1 }
|
||||
| Null { Null $1 }
|
||||
| Fail expr { let region = cover $1 (expr_to_region $2)
|
||||
in Fail {region; value = $1,$2} }
|
||||
| fail_instr { Fail $1 }
|
||||
| Skip { Skip $1 }
|
||||
| record_patch { RecordPatch $1 }
|
||||
| map_patch { MapPatch $1 }
|
||||
|
||||
map_patch:
|
||||
Patch path With map_injection {
|
||||
let region = cover $1 $4.region in
|
||||
let value = {
|
||||
kwd_patch = $1;
|
||||
path = $2;
|
||||
kwd_with = $3;
|
||||
map_inj = $4}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
map_injection:
|
||||
Map series(binding) {
|
||||
let first, (others, terminator, close) = $2 in
|
||||
let region = cover $1 close
|
||||
and value = {
|
||||
opening = $1;
|
||||
bindings = first, others;
|
||||
terminator;
|
||||
close}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
binding:
|
||||
expr ARROW expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop
|
||||
and value = {
|
||||
source = $1;
|
||||
arrow = $2;
|
||||
image = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
record_patch:
|
||||
Patch path With record_injection {
|
||||
let region = cover $1 $4.region in
|
||||
let value = {
|
||||
kwd_patch = $1;
|
||||
path = $2;
|
||||
kwd_with = $3;
|
||||
record_inj = $4}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
fail_instr:
|
||||
Fail expr {
|
||||
let region = cover $1 (expr_to_region $2)
|
||||
and value = {kwd_fail = $1; fail_expr = $2}
|
||||
in {region; value}}
|
||||
|
||||
proc_call:
|
||||
fun_call { $1 }
|
||||
@ -400,13 +516,13 @@ conditional:
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
match_instr:
|
||||
Match expr With option(VBAR) cases End {
|
||||
case_instr:
|
||||
Case expr Of option(VBAR) cases End {
|
||||
let region = cover $1 $6 in
|
||||
let value = {
|
||||
kwd_match = $1;
|
||||
kwd_case = $1;
|
||||
expr = $2;
|
||||
kwd_with = $3;
|
||||
kwd_of = $3;
|
||||
lead_vbar = $4;
|
||||
cases = $5;
|
||||
kwd_end = $6}
|
||||
@ -421,16 +537,27 @@ cases:
|
||||
|
||||
case:
|
||||
pattern ARROW instruction {
|
||||
let region = cover $1.region (instr_to_region $3)
|
||||
in {region; value = $1,$2,$3}
|
||||
let region = cover (pattern_to_region $1) (instr_to_region $3)
|
||||
and value = {pattern = $1; arrow = $2; instr = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
ass:
|
||||
var ASS expr {
|
||||
let region = cover $1.region (expr_to_region $3)
|
||||
in {region; value = $1,$2,$3}
|
||||
assignment:
|
||||
lhs ASS rhs {
|
||||
let stop = rhs_to_region $3 in
|
||||
let region = cover (lhs_to_region $1) stop
|
||||
and value = {lhs = $1; assign = $2; rhs = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
rhs:
|
||||
expr { Expr $1 }
|
||||
| C_None { NoneExpr $1 : rhs }
|
||||
|
||||
lhs:
|
||||
path { Path $1 }
|
||||
| map_lookup { MapPath $1 }
|
||||
|
||||
loop:
|
||||
while_loop { $1 }
|
||||
| for_loop { $1 }
|
||||
@ -438,39 +565,45 @@ loop:
|
||||
while_loop:
|
||||
While expr block {
|
||||
let region = cover $1 $3.region
|
||||
in While {region; value=$1,$2,$3}
|
||||
and value = {
|
||||
kwd_while = $1;
|
||||
cond = $2;
|
||||
block = $3}
|
||||
in While {region; value}
|
||||
}
|
||||
|
||||
for_loop:
|
||||
For ass Down? To expr option(step_clause) block {
|
||||
For var_assign Down? To expr option(step_clause) block {
|
||||
let region = cover $1 $7.region in
|
||||
let value =
|
||||
{
|
||||
let value = {
|
||||
kwd_for = $1;
|
||||
ass = $2;
|
||||
assign = $2;
|
||||
down = $3;
|
||||
kwd_to = $4;
|
||||
bound = $5;
|
||||
step = $6;
|
||||
block = $7;
|
||||
}
|
||||
block = $7}
|
||||
in For (ForInt {region; value})
|
||||
}
|
||||
|
||||
| For var option(arrow_clause) In expr block {
|
||||
let region = cover $1 $6.region in
|
||||
let value =
|
||||
{
|
||||
let value = {
|
||||
kwd_for = $1;
|
||||
var = $2;
|
||||
bind_to = $3;
|
||||
kwd_in = $4;
|
||||
expr = $5;
|
||||
block = $6;
|
||||
}
|
||||
block = $6}
|
||||
in For (ForCollect {region; value})
|
||||
}
|
||||
|
||||
var_assign:
|
||||
var ASS expr {
|
||||
let region = cover $1.region (expr_to_region $3)
|
||||
and value = {name = $1; assign = $2; expr = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
step_clause:
|
||||
Step expr { $1,$2 }
|
||||
|
||||
@ -483,20 +616,22 @@ interactive_expr:
|
||||
expr EOF { $1 }
|
||||
|
||||
expr:
|
||||
expr OR conj_expr {
|
||||
expr Or conj_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Or {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3} in
|
||||
ELogic (BoolExpr (Or {region; value}))
|
||||
}
|
||||
| conj_expr { $1 }
|
||||
|
||||
conj_expr:
|
||||
conj_expr AND comp_expr {
|
||||
conj_expr And comp_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
And {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in ELogic (BoolExpr (And {region; value}))
|
||||
}
|
||||
| comp_expr { $1 }
|
||||
|
||||
@ -504,38 +639,44 @@ comp_expr:
|
||||
comp_expr LT cat_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Lt {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in ELogic (CompExpr (Lt {region; value}))
|
||||
}
|
||||
| comp_expr LEQ cat_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Leq {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in ELogic (CompExpr (Leq {region; value}))
|
||||
}
|
||||
| comp_expr GT cat_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Gt {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in ELogic (CompExpr (Gt {region; value}))
|
||||
}
|
||||
| comp_expr GEQ cat_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Geq {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in ELogic (CompExpr (Geq {region; value}))
|
||||
}
|
||||
| comp_expr EQUAL cat_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Equal {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in ELogic (CompExpr (Equal {region; value}))
|
||||
}
|
||||
| comp_expr NEQ cat_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Neq {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in ELogic (CompExpr (Neq {region; value}))
|
||||
}
|
||||
| cat_expr { $1 }
|
||||
|
||||
@ -543,8 +684,9 @@ cat_expr:
|
||||
cons_expr CAT cat_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Cat {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in EString (Cat {region; value})
|
||||
}
|
||||
| cons_expr { $1 }
|
||||
|
||||
@ -552,8 +694,9 @@ cons_expr:
|
||||
add_expr CONS cons_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Cons {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in EList (Cons {region; value})
|
||||
}
|
||||
| add_expr { $1 }
|
||||
|
||||
@ -561,14 +704,16 @@ add_expr:
|
||||
add_expr PLUS mult_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Add {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in EArith (Add {region; value})
|
||||
}
|
||||
| add_expr MINUS mult_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Sub {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in EArith (Sub {region; value})
|
||||
}
|
||||
| mult_expr { $1 }
|
||||
|
||||
@ -576,68 +721,118 @@ mult_expr:
|
||||
mult_expr TIMES unary_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Mult {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in EArith (Mult {region; value})
|
||||
}
|
||||
| mult_expr SLASH unary_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Div {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in EArith (Div {region; value})
|
||||
}
|
||||
| mult_expr Mod unary_expr {
|
||||
let start = expr_to_region $1
|
||||
and stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
Mod {region; value = $1,$2,$3}
|
||||
let region = cover start stop
|
||||
and value = {arg1 = $1; op = $2; arg2 = $3}
|
||||
in EArith (Mod {region; value})
|
||||
}
|
||||
| unary_expr { $1 }
|
||||
|
||||
unary_expr:
|
||||
MINUS core_expr {
|
||||
let stop = expr_to_region $2 in
|
||||
let region = cover $1 stop in
|
||||
Neg {region; value = $1,$2}
|
||||
let region = cover $1 stop
|
||||
and value = {op = $1; arg = $2}
|
||||
in EArith (Neg {region; value})
|
||||
}
|
||||
| Not core_expr {
|
||||
let stop = expr_to_region $2 in
|
||||
let region = cover $1 stop in
|
||||
Not {region; value = $1,$2}
|
||||
let region = cover $1 stop
|
||||
and value = {op = $1; arg = $2} in
|
||||
ELogic (BoolExpr (Not {region; value}))
|
||||
}
|
||||
| core_expr { $1 }
|
||||
|
||||
core_expr:
|
||||
Int { Int $1 }
|
||||
| var { Var $1 }
|
||||
| String { String $1 }
|
||||
| Bytes { Bytes $1 }
|
||||
| C_False { False $1 }
|
||||
| C_True { True $1 }
|
||||
| C_Unit { Unit $1 }
|
||||
| tuple { Tuple $1 }
|
||||
| list_expr { List $1 }
|
||||
| empty_list { EmptyList $1 }
|
||||
| set_expr { Set $1 }
|
||||
| empty_set { EmptySet $1 }
|
||||
| none_expr { NoneExpr $1 }
|
||||
| fun_call { FunCall $1 }
|
||||
Int { EArith (Int $1) }
|
||||
| var { EVar $1 }
|
||||
| String { EString (String $1) }
|
||||
| Bytes { EBytes $1 }
|
||||
| C_False { ELogic (BoolExpr (False $1)) }
|
||||
| C_True { ELogic (BoolExpr (True $1)) }
|
||||
| C_Unit { EUnit $1 }
|
||||
| tuple { ETuple $1 }
|
||||
| list_expr { EList (List $1) }
|
||||
| empty_list { EList (EmptyList $1) }
|
||||
| set_expr { ESet (Set $1) }
|
||||
| empty_set { ESet (EmptySet $1) }
|
||||
| none_expr { EConstr (NoneExpr $1) }
|
||||
| fun_call { ECall $1 }
|
||||
| map_expr { EMap $1 }
|
||||
| record_expr { ERecord $1 }
|
||||
| Constr arguments {
|
||||
let region = cover $1.region $2.region in
|
||||
ConstrApp {region; value = $1,$2}
|
||||
EConstr (ConstrApp {region; value = $1,$2})
|
||||
}
|
||||
| C_Some arguments {
|
||||
let region = cover $1 $2.region in
|
||||
SomeApp {region; value = $1,$2}
|
||||
EConstr (SomeApp {region; value = $1,$2})
|
||||
}
|
||||
| map_name DOT brackets(expr) {
|
||||
let region = cover $1.region $3.region in
|
||||
let value =
|
||||
{
|
||||
map_name = $1;
|
||||
|
||||
map_expr:
|
||||
map_lookup { MapLookUp $1 }
|
||||
|
||||
path:
|
||||
var { Name $1 }
|
||||
| record_projection { RecordPath $1 }
|
||||
|
||||
map_lookup:
|
||||
path brackets(expr) {
|
||||
let region = cover (path_to_region $1) $2.region in
|
||||
let value = {
|
||||
path = $1;
|
||||
index = $2}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
record_expr:
|
||||
record_injection { RecordInj $1 }
|
||||
| record_projection { RecordProj $1 }
|
||||
|
||||
record_injection:
|
||||
Record series(field_assignment) {
|
||||
let first, (others, terminator, close) = $2 in
|
||||
let region = cover $1 close
|
||||
and value = {
|
||||
opening = $1;
|
||||
fields = first, others;
|
||||
terminator;
|
||||
close}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
field_assignment:
|
||||
field_name EQUAL expr {
|
||||
let region = cover $1.region (expr_to_region $3)
|
||||
and value = {
|
||||
field_name = $1;
|
||||
equal = $2;
|
||||
field_expr = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
record_projection:
|
||||
record_name DOT nsepseq(field_name,DOT) {
|
||||
let stop = nsepseq_to_region (fun x -> x.region) $3 in
|
||||
let region = cover $1.region stop
|
||||
and value = {
|
||||
record_name = $1;
|
||||
selector = $2;
|
||||
index = $3;
|
||||
}
|
||||
in MapLookUp {region; value}
|
||||
field_path = $3}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
fun_call:
|
||||
@ -656,23 +851,46 @@ list_expr:
|
||||
brackets(nsepseq(expr,COMMA)) { $1 }
|
||||
|
||||
empty_list:
|
||||
par(LBRACKET RBRACKET COLON type_expr { $1,$2,$3,$4 }) { $1 }
|
||||
par(typed_empty_list) { $1 }
|
||||
|
||||
typed_empty_list:
|
||||
LBRACKET RBRACKET COLON type_expr {
|
||||
{lbracket = $1;
|
||||
rbracket = $2;
|
||||
colon = $3;
|
||||
list_type = $4}
|
||||
}
|
||||
|
||||
set_expr:
|
||||
braces(nsepseq(expr,COMMA)) { $1 }
|
||||
|
||||
empty_set:
|
||||
par(LBRACE RBRACE COLON type_expr { $1,$2,$3,$4 }) { $1 }
|
||||
par(typed_empty_set) { $1 }
|
||||
|
||||
typed_empty_set:
|
||||
LBRACE RBRACE COLON type_expr {
|
||||
{lbrace = $1;
|
||||
rbrace = $2;
|
||||
colon = $3;
|
||||
set_type = $4}
|
||||
}
|
||||
|
||||
none_expr:
|
||||
par(C_None COLON type_expr { $1,$2,$3 }) { $1 }
|
||||
par(typed_none_expr) { $1 }
|
||||
|
||||
typed_none_expr:
|
||||
C_None COLON type_expr {
|
||||
{c_None = $1;
|
||||
colon = $2;
|
||||
opt_type = $3}
|
||||
}
|
||||
|
||||
(* Patterns *)
|
||||
|
||||
pattern:
|
||||
nsepseq(core_pattern,CONS) {
|
||||
let region = nsepseq_to_region core_pattern_to_region $1
|
||||
in {region; value=$1}
|
||||
let region = nsepseq_to_region pattern_to_region $1
|
||||
in PCons {region; value=$1}
|
||||
}
|
||||
|
||||
core_pattern:
|
||||
|
@ -1,4 +1,4 @@
|
||||
(* Driver for the parser of Ligo *)
|
||||
(* Driver for the parser of LIGO *)
|
||||
|
||||
open! EvalOpt (* Reads the command-line options: Effectful! *)
|
||||
|
||||
@ -25,14 +25,13 @@ let print_error ?(offsets=true) mode Region.{region; value} =
|
||||
let reg = region#to_string ~file ~offsets mode in
|
||||
Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
|
||||
|
||||
(* Path to the Ligo standard library *)
|
||||
(*
|
||||
(* Path for CPP inclusions (#include) *)
|
||||
|
||||
let lib_path =
|
||||
match EvalOpt.libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
*)
|
||||
|
||||
(* Preprocessing the input source and opening the input channels *)
|
||||
|
||||
@ -52,9 +51,11 @@ let pp_input =
|
||||
let cpp_cmd =
|
||||
match EvalOpt.input with
|
||||
None | Some "-" ->
|
||||
Printf.sprintf "cpp -traditional-cpp - -o %s" pp_input
|
||||
Printf.sprintf "cpp -traditional-cpp%s - -o %s"
|
||||
lib_path pp_input
|
||||
| Some file ->
|
||||
Printf.sprintf "cpp -traditional-cpp %s -o %s" file pp_input
|
||||
Printf.sprintf "cpp -traditional-cpp%s %s -o %s"
|
||||
lib_path file pp_input
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" EvalOpt.verbose
|
||||
@ -84,7 +85,7 @@ let tokeniser = read ~log
|
||||
|
||||
let () =
|
||||
try
|
||||
let ast = Parser.program tokeniser buffer in
|
||||
let ast = Parser.contract tokeniser buffer in
|
||||
if Utils.String.Set.mem "ast" EvalOpt.verbose
|
||||
then AST.print_tokens ast
|
||||
with
|
||||
@ -98,12 +99,14 @@ let () =
|
||||
print_error ~offsets EvalOpt.mode error
|
||||
| Sys_error msg -> Utils.highlight msg
|
||||
|
||||
(*
|
||||
(* Temporary: force dune to build AST2.ml *)
|
||||
let () =
|
||||
let open AST2 in
|
||||
let _ = s_ast in
|
||||
()
|
||||
|
||||
(*
|
||||
(* Temporary: force dune to build AST2.ml *)
|
||||
let () =
|
||||
if false then
|
||||
@ -111,3 +114,5 @@ let () =
|
||||
()
|
||||
else
|
||||
()
|
||||
*)
|
||||
*)
|
||||
|
@ -7,6 +7,8 @@ type t = <
|
||||
|
||||
set_file : string -> t;
|
||||
set_line : int -> t;
|
||||
set_offset : int -> t;
|
||||
set : file:string -> line:int -> offset:int -> t;
|
||||
new_line : string -> t;
|
||||
add_nl : t;
|
||||
|
||||
@ -44,8 +46,20 @@ let make ~byte ~point_num ~point_bol =
|
||||
val point_bol = point_bol
|
||||
method point_bol = point_bol
|
||||
|
||||
method set_file file = {< byte = Lexing.{byte with pos_fname = file} >}
|
||||
method set_line line = {< byte = Lexing.{byte with pos_lnum = line} >}
|
||||
method set_file file =
|
||||
{< byte = Lexing.{byte with pos_fname = file} >}
|
||||
|
||||
method set_line line =
|
||||
{< byte = Lexing.{byte with pos_lnum = line} >}
|
||||
|
||||
method set_offset offset =
|
||||
{< byte = Lexing.{byte with pos_cnum = byte.pos_bol + offset} >}
|
||||
|
||||
method set ~file ~line ~offset =
|
||||
let pos = self#set_file file in
|
||||
let pos = pos#set_line line in
|
||||
let pos = pos#set_offset offset
|
||||
in pos
|
||||
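(* Usage sketch (hypothetical values): [pos#set ~file:"contract.ligo"
   ~line:42 ~offset:0] returns a copy of [pos] repositioned at offset 0
   on line 42 of that file. *)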
|
||||
(* The string must not contain '\n'. See [new_line]. *)
|
||||
|
||||
|
@ -36,6 +36,8 @@ type t = <
|
||||
|
||||
set_file : string -> t;
|
||||
set_line : int -> t;
|
||||
set_offset : int -> t;
|
||||
set : file:string -> line:int -> offset:int -> t;
|
||||
|
||||
(* The call [pos#new_line s], where the string [s] is either "\n" or
|
||||
"\c\r", updates the position [pos] with a new line. *)
|
||||
|
@ -10,6 +10,7 @@ type t = <
|
||||
|
||||
shift_bytes : int -> t;
|
||||
shift_one_uchar : int -> t;
|
||||
set_file : string -> t;
|
||||
|
||||
(* Getters *)
|
||||
|
||||
@ -55,6 +56,11 @@ let make ~(start: Pos.t) ~(stop: Pos.t) =
|
||||
and stop = stop#shift_one_uchar len
|
||||
in {< start = start; stop = stop >}
|
||||
|
||||
method set_file name =
|
||||
let start = start#set_file name
|
||||
and stop = stop#set_file name
|
||||
in {< start = start; stop = stop >}
|
||||
|
||||
(* Getters *)
|
||||
|
||||
method file = start#file
|
||||
|
@ -24,10 +24,12 @@ type t = <
|
||||
translation of region [region] of [n] bytes forward in the
|
||||
file. The call [region#shift_one_uchar n] is similar, except that
|
||||
it assumes that [n] is the number of bytes making up one unicode
|
||||
point. *)
|
||||
point. The call [region#set_file f] sets the file name to be
|
||||
[f]. *)
|
||||
|
||||
shift_bytes : int -> t;
|
||||
shift_one_uchar : int -> t;
|
||||
set_file : string -> t;
|
||||
|
||||
(* Getters *)
|
||||
|
||||
|
@ -18,7 +18,7 @@ module O = struct
|
||||
PVar of var_name
|
||||
| PWild
|
||||
| PInt of Z.t
|
||||
| PBytes of MBytes.t
|
||||
| PBytes of Hex.t
|
||||
| PString of string
|
||||
| PUnit
|
||||
| PFalse
|
||||
@ -42,15 +42,16 @@ module O = struct
|
||||
| Function of { arg: type_expr; ret: type_expr }
|
||||
| Ref of type_expr
|
||||
| String
|
||||
| Bytes
|
||||
| Int
|
||||
| Unit
|
||||
| Bool
|
||||
|
||||
and type_expr = { type_expr: type_expr_case; name: string option; orig: Region.t }
|
||||
and type_expr = { type_expr: type_expr_case; name: type_name option; orig: Region.t }
|
||||
|
||||
type typed_var = { name:var_name; ty:type_expr; orig: asttodo }
|
||||
|
||||
type type_decl = { name:string; ty:type_expr; orig: asttodo }
|
||||
type type_decl = { name: type_name; ty:type_expr; orig: asttodo }
|
||||
|
||||
type expr_case =
|
||||
App of { operator: operator; arguments: expr list }
|
||||
@ -84,7 +85,7 @@ module O = struct
|
||||
|
||||
and constant =
|
||||
Unit
|
||||
| Int of Z.t | String of string | Bytes of MBytes.t
|
||||
| Int of Z.t | String of string | Bytes of Hex.t
|
||||
| False | True
|
||||
| Null
|
||||
| EmptySet
|
||||
@ -117,29 +118,51 @@ let fold_map f a l =
|
||||
let last_acc, last_l = List.fold_left f (a, []) l
|
||||
in last_acc, List.rev last_l
|
||||
|
||||
let map f l = List.rev (List.rev_map f l)
|
||||
|
||||
let shadow (name : string) (typ : O.type_expr) (env : O.type_expr list SMap.t)
|
||||
: O.type_expr list SMap.t =
|
||||
SMap.update name (function None -> Some [typ] | Some tl -> Some (typ :: tl)) env
|
||||
|
||||
let lookup (name : string) (env : O.type_expr list SMap.t) : O.type_expr =
|
||||
match SMap.find name env with
|
||||
latest :: shadowed -> latest
|
||||
| [] -> failwith "Unbound variable"
|
||||
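(* A small sketch (hypothetical values) of how [shadow] and [lookup]
   interact: the second binding of "x" shadows the first, and [lookup]
   returns it. *)

let _shadowing_example () =
  let ty c : O.type_expr = {O.type_expr = c; name = None; orig = Region.ghost} in
  let env = shadow "x" (ty O.Int) SMap.empty in
  let env = shadow "x" (ty O.String) env in
  (lookup "x" env).type_expr  (* evaluates to O.String, not O.Int *)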
|
||||
let string_of_name ({name;_} : I.name_and_region) = name
|
||||
|
||||
let a_name_and_region ({name; orig} : I.name_and_region) : O.name_and_region =
|
||||
{name; orig}
|
||||
|
||||
let a_type_constructor (tve : tve) : I.type_constructor -> O.type_constructor = function
|
||||
Option -> failwith "TODO"
|
||||
| List -> failwith "TODO"
|
||||
| Set -> failwith "TODO"
|
||||
| Map -> failwith "TODO"
|
||||
Option -> Option
|
||||
| List -> List
|
||||
| Set -> Set
|
||||
| Map -> Map
|
||||
|
||||
let a_type_expr_case (tve : tve) : I.type_expr_case -> O.type_expr_case = function
|
||||
Sum l -> failwith "TODO"
|
||||
| Record l -> failwith "TODO"
|
||||
Sum lt -> failwith "TODO"
|
||||
| Record lt -> failwith "TODO"
|
||||
| TypeApp (tc, args) -> failwith "TODO"
|
||||
| Function {arg;ret} -> failwith "TODO"
|
||||
| Ref t -> failwith "TODO"
|
||||
| String -> failwith "TODO"
|
||||
| Int -> failwith "TODO"
|
||||
| Unit -> failwith "TODO"
|
||||
| Bool -> failwith "TODO"
|
||||
|
||||
| String -> String
|
||||
| Int -> Int
|
||||
| Unit -> Unit
|
||||
| Bool -> Bool
|
||||
|
||||
let a_type_expr (tve : tve) ({type_expr;name;orig} : I.type_expr) : O.type_expr =
|
||||
failwith "TODO"
|
||||
let type_expr = a_type_expr_case tve type_expr in
|
||||
let name = match name with
|
||||
None -> None
|
||||
|Some name -> Some (a_name_and_region name)
|
||||
in {type_expr;name;orig}
|
||||
|
||||
let a_type (tve : tve) ({name;ty;orig} : I.type_decl) : tve * O.type_decl =
|
||||
failwith "TODO"
|
||||
let a_type (te,ve : tve) ({name;ty;orig} : I.type_decl) : tve * O.type_decl =
|
||||
let ty = a_type_expr (te,ve) ty in
|
||||
let tve = shadow (string_of_name name) ty te, ve in
|
||||
let name = (a_name_and_region name) in
|
||||
tve, {name; ty; orig}
|
||||
|
||||
let a_types (tve : tve) (l : I.type_decl list) : tve * O.type_decl list =
|
||||
fold_map a_type tve l
|
||||
@ -147,9 +170,99 @@ let a_types (tve : tve) (l : I.type_decl list) : tve * O.type_decl list =
|
||||
let a_storage_decl : tve -> I.typed_var -> tve * O.typed_var =
|
||||
failwith "TODO"
|
||||
|
||||
let a_declarations : tve -> I.decl list -> tve * O.decl list =
|
||||
let type_expr_case_equal (t1 : O.type_expr_case) (t2 : O.type_expr_case) : bool = match t1,t2 with
|
||||
Sum m1, Sum m2 -> failwith "TODO" (* of (O.name_and_region * O.type_expr) SMap.t *)
|
||||
| Record m1, Record m2 -> failwith "TODO" (* of (O.name_and_region * O.type_expr) SMap.t *)
|
||||
| TypeApp (tc1, args1), TypeApp (tc2, args2) -> failwith "TODO" (* of O.type_constructor * O.type_expr list *)
|
||||
| Function {arg=arg1;ret=ret1}, Function {arg=arg2;ret=ret2} -> failwith "TODO" (* of { arg : O.type_expr; ret : O.type_expr; } *)
|
||||
| Ref t1, Ref t2 -> failwith "TODO" (* of O.type_expr *)
|
||||
| String, String -> true
|
||||
| Int, Int -> true
|
||||
| Unit, Unit -> true
|
||||
| Bool, Bool -> true
|
||||
| _ -> false
|
||||
|
||||
let type_expr_equal (t1 : O.type_expr) (t2 : O.type_expr) : bool =
|
||||
type_expr_case_equal t1.type_expr t2.type_expr
|
||||
|
||||
let check_type_expr_equal (expected : O.type_expr) (actual : O.type_expr) : unit =
|
||||
if type_expr_equal expected actual then
|
||||
()
|
||||
else
|
||||
failwith "got [actual] but expected [expected]"
|
||||
|
||||
let a_var_expr (te,ve : tve) (expected : O.type_expr) (var_name : I.name_and_region) : O.expr_case =
|
||||
check_type_expr_equal expected (lookup (string_of_name var_name) ve);
|
||||
Var { name = a_name_and_region var_name;
|
||||
ty = expected;
|
||||
orig = `TODO }
|
||||
|
||||
let a_constant_expr (tve : tve) (expected : O.type_expr) (constant : I.constant) : O.expr_case =
|
||||
let to_type_expr type_expr_case : O.type_expr =
|
||||
{ type_expr = type_expr_case; name = None; orig = Region.ghost } in
|
||||
let actual : O.type_expr = match constant with
|
||||
Unit -> to_type_expr Unit
|
||||
| Int _ -> to_type_expr Int
|
||||
| String _ -> to_type_expr String
|
||||
| Bytes _ -> to_type_expr Bytes
|
||||
| False -> to_type_expr Bool
|
||||
| True -> to_type_expr Bool
|
||||
| Null t -> a_type_expr tve t
|
||||
| EmptySet t -> a_type_expr tve t
|
||||
| CNone t -> a_type_expr tve t
|
||||
in
|
||||
check_type_expr_equal expected actual;
|
||||
let c : O.constant = match constant with
|
||||
Unit -> Unit
|
||||
| Int i -> Int i
|
||||
| String s -> String s
|
||||
| Bytes b -> Bytes b
|
||||
| False -> False
|
||||
| True -> True
|
||||
| Null _ -> Null
|
||||
| EmptySet _ -> EmptySet
|
||||
| CNone _ -> CNone
|
||||
in Constant c
|
||||
|
||||
let map_to_list m =
|
||||
List.rev (SMap.fold (fun field_name_string p l -> p :: l) m [])
|
||||
|
||||
let a_field tve (expected,expr) =
|
||||
failwith "TODO"
|
||||
|
||||
let a_record (tve : tve) (expected : O.type_expr) (record : (I.field_name * I.expr) list)
|
||||
: O.expr_case =
|
||||
let {type_expr = expected; _} : O.type_expr = expected in
|
||||
let expected = match expected with
|
||||
Record fields -> fields
|
||||
| _ -> failwith "expected some_type but got record" in
|
||||
let expected_and_field =
|
||||
List.combine
|
||||
(map_to_list expected)
|
||||
record (* TODO SHOULD BE (map_to_list record) *) in
|
||||
Record (map (a_field tve) expected_and_field)
|
||||
|
||||
let a_expr_case (te,ve : tve) (expected : O.type_expr) : I.expr -> O.expr_case = function
|
||||
App {operator;arguments} -> failwith "TODO"
|
||||
| Var var_name -> a_var_expr (te,ve) expected var_name
|
||||
| Constant constant -> a_constant_expr (te,ve) expected constant
|
||||
| Record record -> a_record (te,ve) expected record
|
||||
| Lambda lambda -> failwith "TODO"
|
||||
|
||||
let a_expr (te,ve : tve) (expected : O.type_expr) (e : I.expr) : O.expr =
|
||||
let expr_case = a_expr_case (te,ve) expected e in
|
||||
{ expr = expr_case; ty = expected; orig = `TODO }
|
||||
|
||||
let a_declaration (te,ve : tve) ({name;ty;value} : I.decl) : tve * O.decl =
|
||||
let ty = a_type_expr (te,ve) ty in
|
||||
let value = a_expr (te,ve) ty value in
|
||||
let ve = shadow (string_of_name name) ty ve in
|
||||
let name = a_name_and_region name in
|
||||
(te,ve), {var={name;ty;orig=`TODO};value;orig = `TODO}
|
||||
|
||||
let a_declarations (tve : tve) (l : I.decl list) : tve * O.decl list =
|
||||
fold_map a_declaration tve l
|
||||
|
||||
let a_ast I.{types; storage_decl; declarations; orig} =
|
||||
let tve = SMap.empty, SMap.empty in
|
||||
let tve, types = a_types tve types in
|
||||
@ -159,4 +272,3 @@ let a_ast I.{types; storage_decl; declarations; orig} =
|
||||
O.{types; storage_decl; declarations; orig}
|
||||
|
||||
let annotate : I.ast -> O.ast = a_ast
|
||||
|
||||
|
@ -16,7 +16,7 @@ module O : sig
|
||||
PVar of var_name
|
||||
| PWild
|
||||
| PInt of Z.t
|
||||
| PBytes of MBytes.t
|
||||
| PBytes of Hex.t
|
||||
| PString of string
|
||||
| PUnit
|
||||
| PFalse
|
||||
@ -40,15 +40,16 @@ module O : sig
|
||||
| Function of { arg: type_expr; ret: type_expr }
|
||||
| Ref of type_expr
|
||||
| String
|
||||
| Bytes
|
||||
| Int
|
||||
| Unit
|
||||
| Bool
|
||||
|
||||
and type_expr = { type_expr: type_expr_case; name: string option; orig: Region.t }
|
||||
and type_expr = { type_expr: type_expr_case; name: type_name option; orig: Region.t }
|
||||
|
||||
type typed_var = { name:var_name; ty:type_expr; orig: asttodo }
|
||||
|
||||
type type_decl = { name:string; ty:type_expr; orig: asttodo }
|
||||
type type_decl = { name:type_name; ty:type_expr; orig: asttodo }
|
||||
|
||||
type expr_case =
|
||||
App of { operator: operator; arguments: expr list }
|
||||
@ -82,7 +83,7 @@ module O : sig
|
||||
|
||||
and constant =
|
||||
Unit
|
||||
| Int of Z.t | String of string | Bytes of MBytes.t
|
||||
| Int of Z.t | String of string | Bytes of Hex.t
|
||||
| False | True
|
||||
| Null
|
||||
| EmptySet
|
||||
|
@ -6,31 +6,65 @@
|
||||
(modules ParToken Parser)
|
||||
(flags -la 1 --explain --external-tokens LexToken))
|
||||
|
||||
(library
|
||||
(name ligo_parser)
|
||||
(public_name ligo-parser)
|
||||
(executables
|
||||
(names LexerMain ParserMain)
|
||||
(public_names ligo-lexer ligo-parser)
|
||||
(package ligo-parser)
|
||||
(modules_without_implementation Error)
|
||||
(libraries getopt hex str uutf zarith)
|
||||
)
|
||||
|
||||
;; (executables
|
||||
;; (names LexerMain ParserMain)
|
||||
;; (public_names ligo-lexer ligo-parser)
|
||||
;; (package ligo-parser)
|
||||
;; (modules_without_implementation Error)
|
||||
;; (libraries getopt hex str uutf zarith))
|
||||
(libraries getopt hex str uutf zarith))
|
||||
|
||||
;; The two (rule) stanzas below are for local development only.
|
||||
;; Just run "dune build Parser.exe" to get a Parser.exe in this directory.
|
||||
;; To clean it up, run "dune clean".
|
||||
;; (rule
|
||||
;; (targets Parser.exe)
|
||||
;; (deps ParserMain.exe)
|
||||
;; (action (copy ParserMain.exe Parser.exe))
|
||||
;; (mode promote-until-clean))
|
||||
(rule
|
||||
(targets Parser.exe)
|
||||
(deps ParserMain.exe)
|
||||
(action (copy ParserMain.exe Parser.exe))
|
||||
(mode promote-until-clean))
|
||||
|
||||
;; (rule
|
||||
;; (targets Lexer.exe)
|
||||
;; (deps LexerMain.exe)
|
||||
;; (action (copy LexerMain.exe Lexer.exe))
|
||||
;; (mode promote-until-clean))
|
||||
(rule
|
||||
(targets Lexer.exe)
|
||||
(deps LexerMain.exe)
|
||||
(action (copy LexerMain.exe Lexer.exe))
|
||||
(mode promote-until-clean))
|
||||
|
||||
(rule
|
||||
(targets dot_git_is_dir)
|
||||
(deps check_dot_git_is_dir.sh)
|
||||
(action (run "sh" "check_dot_git_is_dir.sh")))
|
||||
|
||||
(rule
|
||||
(targets .git_main_dir)
|
||||
(deps dot_git_is_dir check_dot_git_is_dir.sh)
|
||||
(action
|
||||
(progn (run "sh" "check_dot_git_is_dir.sh")
|
||||
(run "sh" "-c" "if \"$(cat dot_git_is_dir)\" = true; then printf %s '../../.git' > .git_main_dir; else cat ../../.git | sed -e 's/^gitdir: //' | sed -e 's|$|/../..|' > .git_main_dir; fi"))))
|
||||
|
||||
(rule
|
||||
(targets .git_worktree_dir)
|
||||
(deps dot_git_is_dir check_dot_git_is_dir.sh)
|
||||
(action
|
||||
(progn (run "sh" "check_dot_git_is_dir.sh")
|
||||
(run "sh" "-c" "if \"$(cat dot_git_is_dir)\" = true; then printf %s '../../.git' > .git_worktree_dir; else cat ../../.git | sed -e 's/^gitdir: //' > .git_worktree_dir; fi"))))
|
||||
|
||||
(rule
|
||||
(targets .gitHEAD)
|
||||
(deps .git_main_dir .git_worktree_dir check_dot_git_is_dir.sh)
|
||||
(action
|
||||
(progn (run "sh" "check_dot_git_is_dir.sh")
|
||||
(run "sh" "-c" "ln -s \"$(cat .git_worktree_dir)/HEAD\" .gitHEAD"))))
|
||||
|
||||
(rule
|
||||
(targets Version.gitHEAD)
|
||||
(deps .gitHEAD check_dot_git_is_dir.sh)
|
||||
(action
|
||||
(progn (run "sh" "check_dot_git_is_dir.sh")
|
||||
(run "sh" "-c" "if git symbolic-ref HEAD >/dev/null 2>&1; then ln -s \"$(cat .git_main_dir)/$(git symbolic-ref HEAD)\" Version.gitHEAD; else ln -s \"$(cat .git_worktree_dir)/HEAD\" Version.gitHEAD; fi"))))
|
||||
|
||||
(rule
|
||||
(targets Version.ml)
|
||||
(deps Version.gitHEAD check_dot_git_is_dir.sh)
|
||||
(action
|
||||
(progn (run "sh" "check_dot_git_is_dir.sh")
|
||||
(run "sh" "-c" "printf 'let version = \"%s\"'\\\\n \"$(git describe --always --dirty --abbrev=0)\" > Version.ml")))
|
||||
(mode promote-until-clean))
|
||||
|
@ -2,9 +2,9 @@ opam-version : "2.0"
|
||||
version : "1.0"
|
||||
maintainer : "gabriel.alfour@gmail.com"
|
||||
authors : [ "Galfour" ]
|
||||
homepage : "https://gitlab.com/gabriel.alfour/tezos"
|
||||
bug-reports : "https://gitlab.com/gabriel.alfour/tezos/issues"
|
||||
dev-repo : "git+https://gitlab.com/gabriel.alfour/tezos.git"
|
||||
homepage : "https://gitlab.com/gabriel.alfour/ligo-parser"
|
||||
bug-reports : "https://gitlab.com/gabriel.alfour/ligo-parser/issues"
|
||||
dev-repo : "git+https://gitlab.com/gabriel.alfour/ligo-parser.git"
|
||||
license : "MIT"
|
||||
|
||||
depends : [ "dune" "menhir" "hex" "zarith" "getopt" "uutf" ]
|
||||
@ -15,5 +15,5 @@ build : [
|
||||
]
|
||||
|
||||
url {
|
||||
src: "https://gitlab.com/gabriel.alfour/tezos/-/archive/master/tezos.tar.gz"
|
||||
src: "https://gitlab.com/gabriel.alfour/ligo-parser/-/archive/master/ligo-parser.tar.gz"
|
||||
}
|
||||
|
@ -2,13 +2,13 @@ open Ligo_parser
|
||||
|
||||
module Parser = Parser
|
||||
module Lexer = Lexer
|
||||
module CST = AST
|
||||
module AST = AST2
|
||||
module Typed = Typed
|
||||
module AST_Raw = AST
|
||||
module AST_Simplified = Ast_simplified
|
||||
module AST_Typed = Ast_typed
|
||||
module Mini_c = Mini_c
|
||||
|
||||
open Ligo_helpers.Trace
|
||||
let parse_file (source:string) : CST.t result =
|
||||
let parse_file (source:string) : AST_Raw.t result =
|
||||
let channel = open_in source in
|
||||
let lexbuf = Lexing.from_channel channel in
|
||||
let Lexer.{read ; _} =
|
||||
@ -25,10 +25,10 @@ let parse_file (source:string) : CST.t result =
|
||||
simple_error str
|
||||
)
|
||||
| _ -> simple_error "unrecognized parse_ error"
|
||||
) @@ (fun () -> Parser.program read lexbuf) >>? fun program_cst ->
|
||||
) @@ (fun () -> Parser.contract read lexbuf) >>? fun program_cst ->
|
||||
ok program_cst
|
||||
|
||||
let parse (s:string) : CST.t result =
|
||||
let parse (s:string) : AST_Raw.t result =
|
||||
let lexbuf = Lexing.from_string s in
|
||||
let Lexer.{read ; _} =
|
||||
Lexer.open_token_stream None in
|
||||
@ -44,10 +44,12 @@ let parse (s:string) : CST.t result =
|
||||
simple_error str
|
||||
)
|
||||
| _ -> simple_error "unrecognized parse_ error"
|
||||
) @@ (fun () -> Parser.program read lexbuf) >>? fun program_cst ->
|
||||
) @@ (fun () -> Parser.contract read lexbuf) >>? fun program_cst ->
|
||||
ok program_cst
|
||||
|
||||
|
||||
let abstract (cst:CST.t) : AST.O.ast result = ok @@ AST.s_ast cst
|
||||
let abstract (p:AST_Raw.t) : AST_Simplified.program result = AST_Simplified.Simplify.program p
|
||||
|
||||
let annotate_types (ast:AST.O.ast) = ok @@ Typed.annotate ast
|
||||
let annotate_types (p:AST_Simplified.program) : AST_Typed.program result = Typer.type_program p
|
||||
|
||||
let transpile (p:AST_Typed.program) : Mini_c.program result = Transpiler.translate_program p
|
||||
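(* A sketch of the whole pipeline chained with the [>>?] combinator from
   Ligo_helpers.Trace (hypothetical driver; the stages are the functions
   defined above): *)

let compile_file (source:string) : Mini_c.program result =
  parse_file source >>? fun raw ->
  abstract raw >>? fun simplified ->
  annotate_types simplified >>? fun typed ->
  transpile typed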
|
File diff suppressed because it is too large
210
src/ligo/transpiler.ml
Normal file
210
src/ligo/transpiler.ml
Normal file
@ -0,0 +1,210 @@
open! Ligo_helpers.Trace
open Mini_c

module AST = Ast_typed

let list_of_map m = List.rev @@ Ligo_helpers.X_map.String.fold (fun _ v prev -> v :: prev) m []
let kv_list_of_map m = List.rev @@ Ligo_helpers.X_map.String.fold (fun k v prev -> (k, v) :: prev) m []

let rec translate_type (t:AST.type_value) : type_value result =
  match t with
  | Type_constant ("bool", []) -> ok (`Base Bool)
  | Type_constant ("int", []) -> ok (`Base Int)
  | Type_constant ("string", []) -> ok (`Base String)
  | Type_sum m ->
    let node = Append_tree.of_list @@ list_of_map m in
    let aux a b : type_value result =
      let%bind a = a in
      let%bind b = b in
      ok (`Or (a, b))
    in
    Append_tree.fold_ne translate_type aux node
  | Type_record m ->
    let node = Append_tree.of_list @@ list_of_map m in
    let aux a b : type_value result =
      let%bind a = a in
      let%bind b = b in
      ok (`Pair (a, b))
    in
    Append_tree.fold_ne translate_type aux node
  | Type_tuple lst ->
    let node = Append_tree.of_list lst in
    let aux a b : type_value result =
      let%bind a = a in
      let%bind b = b in
      ok (`Pair (a, b))
    in
    Append_tree.fold_ne translate_type aux node
  | _ -> simple_fail "todo"

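(* Illustration (not in the original source): with these folds, a sum type such
   as  Foo of int | Bar of bool | Baz of string  becomes nested `Or nodes over
   the translated leaves, e.g. something like
     `Or (`Base Int, `Or (`Base Bool, `Base String))
   while records and tuples become nested `Pair nodes in the same fashion.
   The exact nesting depends on how Append_tree.fold_ne groups the leaves. *)
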
let rec translate_block env (b:AST.block) : block result =
  let env' = Environment.extend env in
  let%bind instructions = bind_list @@ List.map (translate_instruction env) b in
  ok (instructions, env')

and translate_instruction (env:Environment.t) (i:AST.instruction) : statement result =
  match i with
  | Assignment {name;annotated_expression} ->
    let%bind expression = translate_annotated_expression env annotated_expression in
    ok @@ (Assignment (name, expression), env)
  | Matching (expr, Match_bool {match_true ; match_false}) ->
    let%bind expr' = translate_annotated_expression env expr in
    let%bind true_branch = translate_block env match_true in
    let%bind false_branch = translate_block env match_false in
    ok @@ (Cond (expr', true_branch, false_branch), env)
  | Loop (expr, body) ->
    let%bind expr' = translate_annotated_expression env expr in
    let%bind body' = translate_block env body in
    ok @@ (While (expr', body'), env)
  | _ -> simple_fail "todo"

and translate_annotated_expression (env:Environment.t) (ae:AST.annotated_expression) : expression result =
  let%bind tv = translate_type ae.type_annotation in
  match ae.expression with
  | Literal (Bool b) -> ok (Literal (`Bool b), tv, env)
  | Literal (Int n) -> ok (Literal (`Int n), tv, env)
  | Literal (Nat n) -> ok (Literal (`Nat n), tv, env)
  | Literal (Bytes s) -> ok (Literal (`Bytes s), tv, env)
  | Literal (String s) -> ok (Literal (`String s), tv, env)
  | Variable name -> ok (Var name, tv, env)
  | Constructor (m, param) ->
    let%bind (param'_expr, param'_tv, _) = translate_annotated_expression env param in
    let%bind map_tv = AST.get_t_sum ae.type_annotation in
    let node_tv = Append_tree.of_list @@ kv_list_of_map map_tv in
    let%bind ae' =
      let leaf (k, tv) : (expression' option * type_value) result =
        if k = m then (
          let%bind _ =
            trace (simple_error "constructor parameter doesn't have expected type (shouldn't happen here)")
            @@ AST.type_value_eq (tv, param.type_annotation) in
          ok (Some (param'_expr), param'_tv)
        ) else (
          let%bind tv = translate_type tv in
          ok (None, tv)
        ) in
      let node a b : (expression' option * type_value) result =
        let%bind a = a in
        let%bind b = b in
        match (a, b) with
        | (None, a), (None, b) -> ok (None, `Or (a, b))
        | (Some _, _), (Some _, _) -> simple_fail "several identical constructors in the same variant (shouldn't happen here)"
        | (Some v, a), (None, b) -> ok (Some (Predicate ("LEFT", [v, a, env])), `Or (a, b))
        | (None, a), (Some v, b) -> ok (Some (Predicate ("RIGHT", [v, b, env])), `Or (a, b))
      in
      let%bind (ae_opt, tv) = Append_tree.fold_ne leaf node node_tv in
      let%bind ae =
        trace_option (simple_error "constructor doesn't exist in claimed type (shouldn't happen here)")
          ae_opt in
      ok (ae, tv, env) in
    ok ae'
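  (* Illustration (not in the original source): if the sum type above were laid
     out as `Or (foo, `Or (bar, baz)), then building the value Baz 42 would wrap
     the translated argument as RIGHT (RIGHT 42); each LEFT/RIGHT predicate picks
     one side of an `Or node on the way down to the matching constructor. *)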
  | Tuple lst ->
    let node = Append_tree.of_list lst in
    let aux (a:expression result) (b:expression result) : expression result =
      let%bind (_, a_ty, _) as a = a in
      let%bind (_, b_ty, _) as b = b in
      ok (Predicate ("PAIR", [a; b]), `Pair(a_ty, b_ty), env)
    in
    Append_tree.fold_ne (translate_annotated_expression env) aux node
  | Tuple_accessor (tpl, ind) ->
    let%bind (tpl'_expr, _, _) = translate_annotated_expression env tpl in
    let%bind tpl_tv = AST.get_t_tuple ae.type_annotation in
    let node_tv = Append_tree.of_list @@ List.mapi (fun i a -> (a, i)) tpl_tv in
    let%bind ae' =
      let leaf (tv, i) : (expression' option * type_value) result =
        let%bind tv = translate_type tv in
        if i = ind then (
          ok (Some (tpl'_expr), tv)
        ) else (
          ok (None, tv)
        ) in
      let node a b : (expression' option * type_value) result =
        let%bind a = a in
        let%bind b = b in
        match (a, b) with
        | (None, a), (None, b) -> ok (None, `Pair (a, b))
        | (Some _, _), (Some _, _) -> simple_fail "several identical indexes in the same tuple (shouldn't happen here)"
        | (Some v, a), (None, b) -> ok (Some (Predicate ("CAR", [v, a, env])), `Pair (a, b))
        | (None, a), (Some v, b) -> ok (Some (Predicate ("CDR", [v, b, env])), `Pair (a, b))
      in
      let%bind (ae_opt, tv) = Append_tree.fold_ne leaf node node_tv in
      let%bind ae = trace_option (simple_error "bad index in tuple (shouldn't happen here)")
          ae_opt in
      ok (ae, tv, env) in
    ok ae'
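  (* Illustration (not in the original source): since tuples are nested pairs,
     accessing index i compiles to a chain of CAR/CDR predicates that walks the
     pair tree down to the selected component. Record accesses below work the
     same way, keyed by field name instead of position. *)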
  | Record m ->
    let node = Append_tree.of_list @@ list_of_map m in
    let aux a b : expression result =
      let%bind (_, a_ty, _) as a = a in
      let%bind (_, b_ty, _) as b = b in
      ok (Predicate ("PAIR", [a; b]), `Pair(a_ty, b_ty), env)
    in
    Append_tree.fold_ne (translate_annotated_expression env) aux node
  | Record_accessor (r, key) ->
    let%bind (r'_expr, _, _) = translate_annotated_expression env r in
    let%bind r_tv = AST.get_t_record ae.type_annotation in
    let node_tv = Append_tree.of_list @@ kv_list_of_map r_tv in
    let%bind ae' =
      let leaf (key', tv) : (expression' option * type_value) result =
        let%bind tv = translate_type tv in
        if key = key' then (
          ok (Some (r'_expr), tv)
        ) else (
          ok (None, tv)
        ) in
      let node a b : (expression' option * type_value) result =
        let%bind a = a in
        let%bind b = b in
        match (a, b) with
        | (None, a), (None, b) -> ok (None, `Pair (a, b))
        | (Some _, _), (Some _, _) -> simple_fail "several identical keys in the same record (shouldn't happen here)"
        | (Some v, a), (None, b) -> ok (Some (Predicate ("CAR", [v, a, env])), `Pair (a, b))
        | (None, a), (Some v, b) -> ok (Some (Predicate ("CDR", [v, b, env])), `Pair (a, b))
      in
      let%bind (ae_opt, tv) = Append_tree.fold_ne leaf node node_tv in
      let%bind ae = trace_option (simple_error "bad key in record (shouldn't happen here)")
          ae_opt in
      ok (ae, tv, env) in
    ok ae'
  | Constant (name, lst) ->
    let%bind lst' = bind_list @@ List.map (translate_annotated_expression env) lst in
    ok (Predicate (name, lst'), tv, env)
  | Lambda { binder ; input_type ; output_type ; body ; result } ->
    (* Try to type it in an empty env; if that succeeds, transpile it as a quote value, else as a closure expression. *)
    let%bind empty_env =
      let%bind input = translate_type input_type in
      ok Environment.(add (binder, input) empty) in
    match to_option (translate_block empty_env body), to_option (translate_annotated_expression empty_env result) with
    | Some body, Some result ->
      let capture_type = No_capture in
      let%bind input = translate_type input_type in
      let%bind output = translate_type output_type in
      let content = {binder;input;output;body;result;capture_type} in
      ok (Literal (`Function {capture=None;content}), tv, env)
    | _ ->
      (* Shallow capture. Capture the whole environment, extend it with a new scope, and append the input to it. *)
      let%bind input = translate_type input_type in
      let sub_env = Environment.extend env in
      let full_env = Environment.add (binder, input) sub_env in
      let%bind (_, post_env) as body = translate_block full_env body in
      let%bind result = translate_annotated_expression post_env result in
      let capture_type = Shallow_capture sub_env in
      let input = Environment.to_mini_c_type full_env in
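      (* Note that the closure's Mini_c input type is the whole extended
         environment (the captured scope plus the bound parameter), not just
         the declared input type translated above. *)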
      let%bind output = translate_type output_type in
      let content = {binder;input;output;body;result;capture_type} in
      ok (Function_expression content, tv, env)

let translate_declaration env (d:AST.declaration) : toplevel_statement result =
  match d with
  | Constant_declaration {name;annotated_expression} ->
    let%bind ((_, tv, _) as expression) = translate_annotated_expression env annotated_expression in
    let env' = Environment.add (name, tv) env in
    ok @@ ((name, expression), env')

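(* Toplevel declarations are folded left to right: each declaration is
   translated in the environment extended by the ones before it, so later
   constants can refer to earlier ones. *)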
let translate_program (lst:AST.program) : program result =
  let aux (prev:(toplevel_statement list * Environment.t) result) cur =
    let%bind (tl, env) = prev in
    let%bind ((_, env') as cur') = translate_declaration env cur in
    ok (cur' :: tl, env')
  in
  let%bind (statements, _) = List.fold_left aux (ok ([], Environment.empty)) lst in
  ok statements