Refactoring of the parsers

* [CameLIGO/ReasonLIGO] The AST node [EAnnot] (expressions
    annotated by a type) now records the region in the source
    code for the colon.

  * [CameLIGO/ReasonLIGO/PascaLIGO] I added the syntax
     %token <...> TOKEN "lexeme"

  * [ReasonLIGO] I changed the AST nodes [Mtz] and [Str] to
    [Mutez] and [String], respectively (in accordance with the
    PascaLIGO front-end). I changed token [DOTDOTDOT] to
    [ELLIPSIS].

  * [ReasonLIGO] I added what was missing to make a local build
    with my Makefile.
This commit is contained in:
Christian Rinderknecht 2019-12-15 17:46:08 +01:00
parent 6692643cc5
commit f8d6396fcd
21 changed files with 1196 additions and 1522 deletions

View File

@ -1 +0,0 @@
ocamlc: -w -42-40

View File

@ -226,7 +226,7 @@ and field_pattern = {
and expr =
ECase of expr case reg
| ECond of cond_expr reg
| EAnnot of (expr * type_expr) reg
| EAnnot of (expr * colon * type_expr) par reg
| ELogic of logic_expr
| EArith of arith_expr
| EString of string_expr

View File

@ -214,7 +214,7 @@ and field_pattern = {
and expr =
ECase of expr case reg (* p1 -> e1 | p2 -> e2 | ... *)
| ECond of cond_expr reg (* if e1 then e2 else e3 *)
| EAnnot of (expr * type_expr) reg (* e : t *)
| EAnnot of (expr * colon * type_expr) par reg (* (e : t) *)
| ELogic of logic_expr
| EArith of arith_expr
| EString of string_expr

View File

@ -5,76 +5,76 @@
(* Literals *)
%token <string Region.reg> String
%token <(LexToken.lexeme * Hex.t) Region.reg> Bytes
%token <(string * Z.t) Region.reg> Int
%token <(string * Z.t) Region.reg> Nat
%token <(string * Z.t) Region.reg> Mutez
%token <string Region.reg> Ident
%token <string Region.reg> Constr
%token <string Region.reg> String "<string>"
%token <(LexToken.lexeme * Hex.t) Region.reg> Bytes "<bytes>"
%token <(string * Z.t) Region.reg> Int "<int>"
%token <(string * Z.t) Region.reg> Nat "<nat>"
%token <(string * Z.t) Region.reg> Mutez "<mutez>"
%token <string Region.reg> Ident "<ident>"
%token <string Region.reg> Constr "<constr>"
(* Symbols *)
%token <Region.t> MINUS
%token <Region.t> PLUS
%token <Region.t> SLASH
%token <Region.t> TIMES
%token <Region.t> MINUS "-"
%token <Region.t> PLUS "+"
%token <Region.t> SLASH "/"
%token <Region.t> TIMES "*"
%token <Region.t> LPAR
%token <Region.t> RPAR
%token <Region.t> LBRACKET
%token <Region.t> RBRACKET
%token <Region.t> LBRACE
%token <Region.t> RBRACE
%token <Region.t> LPAR "("
%token <Region.t> RPAR ")"
%token <Region.t> LBRACKET "["
%token <Region.t> RBRACKET "]"
%token <Region.t> LBRACE "{"
%token <Region.t> RBRACE "}"
%token <Region.t> ARROW
%token <Region.t> CONS
%token <Region.t> CAT
(*%token APPEND*)
%token <Region.t> DOT
%token <Region.t> ARROW "->"
%token <Region.t> CONS "::"
%token <Region.t> CAT "^"
(*%token <Region.t> APPEND "@" *)
%token <Region.t> DOT "."
%token <Region.t> COMMA
%token <Region.t> SEMI
%token <Region.t> COLON
%token <Region.t> VBAR
%token <Region.t> COMMA ","
%token <Region.t> SEMI ";"
%token <Region.t> COLON ":"
%token <Region.t> VBAR "|"
%token <Region.t> WILD
%token <Region.t> WILD "_"
%token <Region.t> EQ
%token <Region.t> NE
%token <Region.t> LT
%token <Region.t> GT
%token <Region.t> LE
%token <Region.t> GE
%token <Region.t> EQ "="
%token <Region.t> NE "<>"
%token <Region.t> LT "<"
%token <Region.t> GT ">"
%token <Region.t> LE "<="
%token <Region.t> GE ">="
%token <Region.t> BOOL_OR
%token <Region.t> BOOL_AND
%token <Region.t> BOOL_OR "||"
%token <Region.t> BOOL_AND "&&"
(* Keywords *)
(*%token And*)
%token <Region.t> Begin
%token <Region.t> Else
%token <Region.t> End
%token <Region.t> False
%token <Region.t> Fun
%token <Region.t> If
%token <Region.t> In
%token <Region.t> Let
%token <Region.t> Match
%token <Region.t> Mod
%token <Region.t> Not
%token <Region.t> Of
%token <Region.t> Or
%token <Region.t> Then
%token <Region.t> True
%token <Region.t> Type
%token <Region.t> With
%token <Region.t> Begin "begin"
%token <Region.t> Else "else"
%token <Region.t> End "end"
%token <Region.t> False "false"
%token <Region.t> Fun "fun"
%token <Region.t> If "if"
%token <Region.t> In "in"
%token <Region.t> Let "let"
%token <Region.t> Match "match"
%token <Region.t> Mod "mod"
%token <Region.t> Not "not"
%token <Region.t> Of "of"
%token <Region.t> Or "or"
%token <Region.t> Then "then"
%token <Region.t> True "true"
%token <Region.t> Type "type"
%token <Region.t> With "with"
(* Data constructors *)
%token <Region.t> C_None (* "None" *)
%token <Region.t> C_Some (* "Some" *)
%token <Region.t> C_None "None"
%token <Region.t> C_Some "Some"
(* Virtual tokens *)

View File

@ -41,24 +41,10 @@ sep_or_term_list(item,sep):
(* Compound constructs *)
par(X):
LPAR X RPAR {
"(" X ")" {
let region = cover $1 $3
and value = {
lpar = $1;
inside = $2;
rpar = $3}
in {region; value}
}
brackets(X):
LBRACKET X RBRACKET {
let region = cover $1 $3
and value = {
lbracket = $1;
inside = $2;
rbracket = $3}
in {region; value}
}
and value = {lpar=$1; inside=$2; rpar=$3}
in {region; value} }
(* Sequences
@ -69,12 +55,9 @@ brackets(X):
latter returns a pair made of the first parsed item (the parameter
[X]) and the rest of the sequence (possibly empty). This way, the
OCaml typechecker can keep track of this information along the
static control-flow graph. The rule [sepseq] parses possibly empty
sequences of items separated by some token (e.g., a comma), and
rule [nsepseq] is for non-empty such sequences. See module [Utils]
for the types corresponding to the semantic actions of those
rules.
*)
static control-flow graph. See module [Utils] for the types
corresponding to the semantic actions of those rules.
*)
(* Possibly empty sequence of items *)
@ -93,51 +76,36 @@ nsepseq(item,sep):
item { $1, [] }
| item sep nsepseq(item,sep) { let h,t = $3 in $1, ($2,h)::t }
(* Possibly empty separated sequence of items *)
sepseq(item,sep):
(**) { None }
| nsepseq(item,sep) { Some $1 }
(* Helpers *)
%inline type_name : Ident { $1 }
%inline field_name : Ident { $1 }
%inline module_name : Constr { $1 }
%inline struct_name : Ident { $1 }
%inline type_name : "<ident>" { $1 }
%inline field_name : "<ident>" { $1 }
%inline struct_name : "<ident>" { $1 }
%inline module_name : "<constr>" { $1 }
(* Non-empty comma-separated values (at least two values) *)
tuple(item):
item COMMA nsepseq(item,COMMA) {
let h,t = $3 in $1,($2,h)::t
}
item "," nsepseq(item,",") { let h,t = $3 in $1,($2,h)::t }
(* Possibly empty semicolon-separated values between brackets *)
list(item):
LBRACKET sep_or_term_list(item,SEMI) RBRACKET {
let elements, terminator = $2 in
let value = {
compound = Brackets ($1,$3);
elements = Some elements;
terminator} in
let region = cover $1 $3
in {value; region}
}
| LBRACKET RBRACKET {
let value = {
compound = Brackets ($1,$2);
elements = None;
terminator = None} in
let region = cover $1 $2
in {value; region}}
"[" sep_or_term_list(item,";")? "]" {
let compound = Brackets ($1,$3)
and region = cover $1 $3 in
let elements, terminator =
match $2 with
None -> None, None
| Some (elements, terminator) ->
Some elements, terminator in
let value = {compound; elements; terminator}
in {region; value} }
(* Main *)
contract:
declarations EOF {
{decl=$1; eof=$2} }
declarations EOF { {decl=$1; eof=$2} }
declarations:
declaration { $1,[] : AST.declaration Utils.nseq }
@ -150,7 +118,7 @@ declaration:
(* Type declarations *)
type_decl:
Type type_name EQ type_expr {
"type" type_name "=" type_expr {
let region = cover $1 (type_expr_to_region $4) in
let value = {
kwd_type = $1;
@ -160,253 +128,216 @@ type_decl:
in {region; value} }
type_expr:
cartesian { $1 }
| sum_type { TSum $1 }
| record_type { TRecord $1 }
cartesian | sum_type | record_type { $1 }
cartesian:
fun_type TIMES nsepseq(fun_type,TIMES) {
fun_type { $1 }
| fun_type "*" nsepseq(fun_type,"*") {
let value = Utils.nsepseq_cons $1 $2 $3 in
let region = nsepseq_to_region type_expr_to_region value
in TProd {region; value}
}
| fun_type { ($1 : type_expr) }
in TProd {region; value} }
fun_type:
core_type {
$1
}
| core_type ARROW fun_type {
core_type { $1 }
| core_type "->" fun_type {
let start = type_expr_to_region $1
and stop = type_expr_to_region $3 in
let region = cover start stop in
TFun {region; value=$1,$2,$3} }
core_type:
type_name {
TVar $1
}
| module_name DOT type_name {
type_name { TVar $1 }
| par(type_expr) { TPar $1 }
| module_name "." type_name {
let module_name = $1.value in
let type_name = $3.value in
let value = module_name ^ "." ^ type_name in
let region = cover $1.region $3.region
in TVar {region; value}
}
| arg=core_type constr=type_constr {
let start = type_expr_to_region arg in
let stop = constr.region in
let region = cover start stop in
let lpar, rpar = ghost, ghost in
let value = {lpar; inside=arg,[]; rpar} in
let arg = {value; region = start} in
TApp Region.{value = (constr,arg); region}
| core_type type_name {
let arg, constr = $1, $2 in
let start = type_expr_to_region arg
and stop = constr.region in
let region = cover start stop in
let lpar, rpar = ghost, ghost in
let value = {lpar; inside=arg,[]; rpar} in
let arg = {region=start; value}
in TApp {region; value = constr,arg}
}
| type_tuple type_constr {
let region = cover $1.region $2.region
in TApp {region; value = $2,$1}
}
| par(type_expr) {
TPar $1 }
type_constr:
type_name { $1 }
| type_tuple type_name {
let arg, constr = $1, $2 in
let region = cover arg.region constr.region
in TApp {region; value = constr,arg} }
type_tuple:
par(tuple(type_expr)) { $1 }
sum_type:
ioption(VBAR) nsepseq(variant,VBAR) {
ioption("|") nsepseq(variant,"|") {
let region = nsepseq_to_region (fun x -> x.region) $2
in {region; value=$2} }
in TSum {region; value=$2} }
variant:
Constr Of cartesian {
"<constr>" { {$1 with value={constr=$1; arg=None}} }
| "<constr>" "of" cartesian {
let region = cover $1.region (type_expr_to_region $3)
and value = {constr=$1; arg = Some ($2, $3)}
in {region; value}
}
| Constr {
{region=$1.region; value={constr=$1; arg=None}} }
and value = {constr=$1; arg = Some ($2,$3)}
in {region; value} }
record_type:
LBRACE sep_or_term_list(field_decl,SEMI) RBRACE {
"{" sep_or_term_list(field_decl,";") "}" {
let ne_elements, terminator = $2 in
let region = cover $1 $3
and value = {
compound = Braces ($1,$3);
ne_elements;
terminator}
in {region; value} }
and value = {compound = Braces ($1,$3); ne_elements; terminator}
in TRecord {region; value} }
field_decl:
field_name COLON type_expr {
field_name ":" type_expr {
let stop = type_expr_to_region $3 in
let region = cover $1.region stop
and value = {field_name = $1; colon = $2; field_type = $3}
and value = {field_name=$1; colon=$2; field_type=$3}
in {region; value} }
(* Top-level non-recursive definitions *)
let_declaration:
Let let_binding {
"let" let_binding {
let kwd_let = $1 in
let binding = $2 in
let value = kwd_let, binding in
let stop = expr_to_region binding.let_rhs in
let region = cover $1 stop
in {value; region} }
in {region; value} }
let_binding:
Ident nseq(sub_irrefutable) type_annotation? EQ expr {
"<ident>" nseq(sub_irrefutable) type_annotation? "=" expr {
let binders = Utils.nseq_cons (PVar $1) $2 in
{binders; lhs_type=$3; eq=$4; let_rhs=$5}
}
| irrefutable type_annotation? EQ expr {
let binders = $1,[] in
{binders; lhs_type=$2; eq=$3; let_rhs=$4} }
| irrefutable type_annotation? "=" expr {
{binders=$1,[]; lhs_type=$2; eq=$3; let_rhs=$4} }
type_annotation:
COLON type_expr { $1,$2 }
":" type_expr { $1,$2 }
(* Patterns *)
irrefutable:
tuple(sub_irrefutable) {
sub_irrefutable { $1 }
| tuple(sub_irrefutable) {
let hd, tl = $1 in
let start = pattern_to_region hd in
let stop = last fst tl in
let region = cover start stop
in PTuple {value=$1; region}
}
| sub_irrefutable { $1 }
in PTuple {region; value=$1} }
sub_irrefutable:
Ident { PVar $1 }
| WILD { PWild $1 }
"<ident>" { PVar $1 }
| "_" { PWild $1 }
| unit { PUnit $1 }
| record_pattern { PRecord $1 }
| par(closed_irrefutable) { PPar $1 }
| Constr {
let value = $1, None
and region = $1.region in PConstr (PConstrApp {value; region}) }
| "<constr>" { PConstr (PConstrApp {$1 with value = $1,None}) }
closed_irrefutable:
irrefutable {
$1 }
| Constr sub_pattern {
irrefutable
| typed_pattern { $1 }
| "<constr>" sub_pattern {
let stop = pattern_to_region $2 in
let region = cover $1.region stop
and value = $1, Some $2
in PConstr (PConstrApp {value; region}) }
| typed_pattern {
PTyped $1 }
and value = $1, Some $2 in
PConstr (PConstrApp {region; value}) }
typed_pattern:
irrefutable COLON type_expr {
irrefutable ":" type_expr {
let start = pattern_to_region $1 in
let stop = type_expr_to_region $3 in
let region = cover start stop in
let value = {
pattern = $1;
colon = $2;
type_expr = $3}
in {value; region} }
let value = {pattern=$1; colon=$2; type_expr=$3}
in PTyped {region; value} }
pattern:
sub_pattern CONS tail {
let start = pattern_to_region $1 in
let stop = pattern_to_region $3 in
let region = cover start stop
and value = $1, $2, $3 in
PList (PCons {region; value})
core_pattern { $1 }
| sub_pattern "::" tail {
let start = pattern_to_region $1 in
let stop = pattern_to_region $3 in
let region = cover start stop in
PList (PCons {region; value=$1,$2,$3})
}
| tuple(sub_pattern) {
let start = pattern_to_region (fst $1) in
let stop = last fst (snd $1) in
let region = cover start stop
in PTuple {value=$1; region}
}
| core_pattern { $1 }
in PTuple {region; value=$1} }
sub_pattern:
par(tail) { PPar $1 }
| core_pattern { $1 }
core_pattern:
Ident { PVar $1 }
| WILD { PWild $1 }
| Int { PInt $1 }
| Nat { PNat $1 }
| Bytes { PBytes $1 }
| String { PString $1 }
| unit { PUnit $1 }
| False { PFalse $1 }
| True { PTrue $1 }
| par(ptuple) { PPar $1 }
"<ident>" { PVar $1 }
| "_" { PWild $1 }
| "<int>" { PInt $1 }
| "<nat>" { PNat $1 }
| "<bytes>" { PBytes $1 }
| "<string>" { PString $1 }
| unit { PUnit $1 }
| "false" { PFalse $1 }
| "true" { PTrue $1 }
| par(ptuple) { PPar $1 }
| list(tail) { PList (PListComp $1) }
| constr_pattern { PConstr $1 }
| record_pattern { PRecord $1 }
| constr_pattern { PConstr $1 }
| record_pattern { PRecord $1 }
record_pattern:
LBRACE sep_or_term_list(field_pattern,SEMI) RBRACE {
"{" sep_or_term_list(field_pattern,";") "}" {
let ne_elements, terminator = $2 in
let region = cover $1 $3 in
let value = {
compound = Braces ($1,$3);
ne_elements;
terminator}
let value = {compound = Braces ($1,$3); ne_elements; terminator}
in {region; value} }
field_pattern:
field_name EQ sub_pattern {
field_name "=" sub_pattern {
let start = $1.region
and stop = pattern_to_region $3 in
let region = cover start stop
and value = {field_name=$1; eq=$2; pattern=$3}
in {value; region} }
in {region; value} }
constr_pattern:
C_None { PNone $1 }
| C_Some sub_pattern {
"None" { PNone $1 }
| "Some" sub_pattern {
let stop = pattern_to_region $2 in
let region = cover $1 stop
and value = $1, $2
in PSomeApp {value; region}
}
| Constr sub_pattern? {
let start = $1.region in
let stop =
match $2 with
Some p -> pattern_to_region p
| None -> start in
let region = cover start stop
and value = $1,$2
in PConstrApp {value; region} }
in PSomeApp {region; value}
}
| "<constr>" {
PConstrApp {$1 with value=$1,None}
}
| "<constr>" sub_pattern {
let region = cover $1.region (pattern_to_region $2)
in PConstrApp {region; value = $1, Some $2} }
ptuple:
tuple(tail) {
let h, t = $1 in
let start = pattern_to_region h in
let stop = last fst t in
let region = cover start stop in
PTuple {value = $1; region} }
let hd, tl = $1 in
let start = pattern_to_region hd in
let stop = last fst tl in
let region = cover start stop
in PTuple {region; value=$1} }
unit:
LPAR RPAR {
let value = ghost, ghost in
let region = cover $1 $2
in {value; region} }
"(" ")" { {region = cover $1 $2; value = ghost, ghost} }
tail:
sub_pattern CONS tail {
let start = pattern_to_region $1 in
let stop = pattern_to_region $3 in
sub_pattern { $1 }
| sub_pattern "::" tail {
let start = pattern_to_region $1 in
let stop = pattern_to_region $3 in
let region = cover start stop in
PList (PCons {value = ($1, $2, $3); region} )
}
| sub_pattern {
$1 }
PList (PCons {region; value=$1,$2,$3}) }
(* Expressions *)
@ -414,72 +345,67 @@ interactive_expr:
expr EOF { $1 }
expr:
base_cond__open(expr) { $1 }
| match_expr(base_cond) { ECase $1 }
base_cond__open(expr) | match_expr(base_cond) { $1 }
base_cond__open(x):
base_expr(x)
| conditional(x) { $1 }
base_expr(x) | conditional(x) { $1 }
base_cond:
base_cond__open(base_cond) { $1 }
base_expr(right_expr):
tuple_expr
| let_expr(right_expr)
| fun_expr(right_expr)
| disj_expr_level { $1 }
tuple_expr:
tuple(disj_expr_level) {
let start = expr_to_region (fst $1) in
let stop = last fst (snd $1) in
let region = cover start stop
in ETuple {value=$1; region}
}
| let_expr(right_expr)
| fun_expr(right_expr)
| disj_expr_level {
$1 }
in ETuple {region; value=$1} }
conditional(right_expr):
if_then_else(right_expr)
| if_then(right_expr) { ECond $1 }
if_then(right_expr):
If expr Then right_expr {
let the_unit = ghost, ghost in
let ifnot = EUnit {region=ghost; value=the_unit} in
let stop = expr_to_region $4 in
let region = cover $1 stop in
let value = {
kwd_if = $1;
test = $2;
kwd_then = $3;
ifso = $4;
kwd_else = ghost;
ifnot}
in {value; region} }
if_then_else(right_expr) | if_then(right_expr) { $1 }
if_then_else(right_expr):
If expr Then closed_if Else right_expr {
"if" expr "then" closed_if "else" right_expr {
let region = cover $1 (expr_to_region $6)
and value = {
kwd_if = $1;
test = $2;
kwd_then = $3;
ifso = $4;
kwd_else = $5;
ifnot = $6}
in {value; region} }
and value = {kwd_if = $1;
test = $2;
kwd_then = $3;
ifso = $4;
kwd_else = $5;
ifnot = $6}
in ECond {region; value} }
if_then(right_expr):
"if" expr "then" right_expr {
let the_unit = ghost, ghost in
let ifnot = EUnit (wrap_ghost the_unit) in
let stop = expr_to_region $4 in
let region = cover $1 stop in
let value = {kwd_if = $1;
test = $2;
kwd_then = $3;
ifso = $4;
kwd_else = ghost;
ifnot}
in ECond {region; value} }
base_if_then_else__open(x):
base_expr(x) { $1 }
| if_then_else(x) { ECond $1 }
base_expr(x) | if_then_else(x) { $1 }
base_if_then_else:
base_if_then_else__open(base_if_then_else) { $1 }
base_if_then_else__open(base_if_then_else) { $1 }
closed_if:
base_if_then_else__open(closed_if) { $1 }
| match_expr(base_if_then_else) { ECase $1 }
base_if_then_else__open(closed_if)
| match_expr(base_if_then_else) { $1 }
match_expr(right_expr):
Match expr With VBAR? cases(right_expr) {
"match" expr "with" "|"? cases(right_expr) {
let cases = {
value = Utils.nsepseq_rev $5;
region = nsepseq_to_region (fun x -> x.region) $5}
@ -488,192 +414,144 @@ match_expr(right_expr):
{region; _}, [] -> region
| _, tl -> last fst tl in
let region = cover $1 stop
and value = {
kwd_match = $1;
expr = $2;
kwd_with = $3;
lead_vbar = $4;
cases}
in {value; region} }
and value = {kwd_match = $1;
expr = $2;
kwd_with = $3;
lead_vbar = $4;
cases}
in ECase {region; value} }
cases(right_expr):
case_clause(right_expr) {
let start = pattern_to_region $1.pattern
and stop = expr_to_region $1.rhs in
let region = cover start stop
in {value=$1; region}, []
in {region; value=$1}, []
}
| cases(base_cond) VBAR case_clause(right_expr) {
| cases(base_cond) "|" case_clause(right_expr) {
let start =
match $1 with
only_case, [] -> only_case.region
| _, other_cases -> last fst other_cases
and stop = expr_to_region $3.rhs in
let region = cover start stop in
let fst_case = {value=$3; region}
and stop = expr_to_region $3.rhs in
let region = cover start stop in
let fst_case = {region; value=$3}
and snd_case, others = $1
in fst_case, ($2,snd_case)::others }
case_clause(right_expr):
pattern ARROW right_expr {
{pattern=$1; arrow=$2; rhs=$3} }
pattern "->" right_expr { {pattern=$1; arrow=$2; rhs=$3} }
let_expr(right_expr):
Let let_binding In right_expr {
let kwd_let = $1 in
let binding = $2 in
let kwd_in = $3 in
let body = $4 in
let stop = expr_to_region $4 in
let region = cover $1 stop in
let let_in = {kwd_let; binding; kwd_in; body}
in ELetIn {region; value=let_in} }
"let" let_binding "in" right_expr {
let kwd_let = $1
and binding = $2
and kwd_in = $3
and body = $4 in
let stop = expr_to_region body in
let region = cover kwd_let stop
and value = {kwd_let; binding; kwd_in; body}
in ELetIn {region; value} }
fun_expr(right_expr):
Fun nseq(irrefutable) ARROW right_expr {
"fun" nseq(irrefutable) "->" right_expr {
let stop = expr_to_region $4 in
let region = cover $1 stop in
let f = {
kwd_fun = $1;
binders = $2;
lhs_type = None;
arrow = $3;
body = $4}
in EFun {region; value=f} }
let value = {kwd_fun = $1;
binders = $2;
lhs_type = None;
arrow = $3;
body = $4}
in EFun {region; value} }
disj_expr_level:
disj_expr { ELogic (BoolExpr (Or $1)) }
| conj_expr_level { $1 }
bin_op(disj_expr_level, "||", conj_expr_level)
| bin_op(disj_expr_level, "or", conj_expr_level) {
ELogic (BoolExpr (Or $1)) }
| conj_expr_level { $1 }
bin_op(arg1,op,arg2):
arg1 op arg2 {
let start = expr_to_region $1 in
let stop = expr_to_region $3 in
let region = cover start stop in
{value={arg1=$1; op=$2; arg2=$3}; region}
}
disj_expr:
bin_op(disj_expr_level, BOOL_OR, conj_expr_level)
| bin_op(disj_expr_level, Or, conj_expr_level) { $1 }
let region = cover start stop
and value = {arg1=$1; op=$2; arg2=$3}
in {region; value} }
conj_expr_level:
conj_expr { ELogic (BoolExpr (And $1)) }
| comp_expr_level { $1 }
conj_expr:
bin_op(conj_expr_level, BOOL_AND, comp_expr_level) { $1 }
bin_op(conj_expr_level, "&&", comp_expr_level) {
ELogic (BoolExpr (And $1)) }
| comp_expr_level { $1 }
comp_expr_level:
lt_expr { ELogic (CompExpr (Lt $1)) }
| le_expr { ELogic (CompExpr (Leq $1)) }
| gt_expr { ELogic (CompExpr (Gt $1)) }
| ge_expr { ELogic (CompExpr (Geq $1)) }
| eq_expr { ELogic (CompExpr (Equal $1)) }
| ne_expr { ELogic (CompExpr (Neq $1)) }
| cat_expr_level { $1 }
lt_expr:
bin_op(comp_expr_level, LT, cat_expr_level) { $1 }
le_expr:
bin_op(comp_expr_level, LE, cat_expr_level) { $1 }
gt_expr:
bin_op(comp_expr_level, GT, cat_expr_level) { $1 }
ge_expr:
bin_op(comp_expr_level, GE, cat_expr_level) { $1 }
eq_expr:
bin_op(comp_expr_level, EQ, cat_expr_level) { $1 }
ne_expr:
bin_op(comp_expr_level, NE, cat_expr_level) { $1 }
bin_op(comp_expr_level, "<", cat_expr_level) {
ELogic (CompExpr (Lt $1)) }
| bin_op(comp_expr_level, "<=", cat_expr_level) {
ELogic (CompExpr (Leq $1)) }
| bin_op(comp_expr_level, ">", cat_expr_level) {
ELogic (CompExpr (Gt $1)) }
| bin_op(comp_expr_level, ">=", cat_expr_level) {
ELogic (CompExpr (Geq $1)) }
| bin_op(comp_expr_level, "=", cat_expr_level) {
ELogic (CompExpr (Equal $1)) }
| bin_op(comp_expr_level, "<>", cat_expr_level) {
ELogic (CompExpr (Neq $1)) }
| cat_expr_level { $1 }
cat_expr_level:
cat_expr { EString (Cat $1) }
(*| reg(append_expr) { EList (Append $1) } *)
| cons_expr_level { $1 }
cat_expr:
bin_op(cons_expr_level, CAT, cat_expr_level) { $1 }
(*
append_expr:
cons_expr_level sym(APPEND) cat_expr_level { $1,$2,$3 }
*)
bin_op(cons_expr_level, "^", cat_expr_level) { EString (Cat $1) }
(*| reg(append_expr) {
bin_op(cons_expr_level, "@", cat_expr_level) { EList (Append $1) } *)
| cons_expr_level { $1 }
cons_expr_level:
cons_expr { EList (ECons $1) }
| add_expr_level { $1 }
cons_expr:
bin_op(add_expr_level, CONS, cons_expr_level) { $1 }
bin_op(add_expr_level, "::", cons_expr_level) { EList (ECons $1) }
| add_expr_level { $1 }
add_expr_level:
plus_expr { EArith (Add $1) }
| minus_expr { EArith (Sub $1) }
| mult_expr_level { $1 }
plus_expr:
bin_op(add_expr_level, PLUS, mult_expr_level) { $1 }
minus_expr:
bin_op(add_expr_level, MINUS, mult_expr_level) { $1 }
bin_op(add_expr_level, "+", mult_expr_level) { EArith (Add $1) }
| bin_op(add_expr_level, "-", mult_expr_level) { EArith (Sub $1) }
| mult_expr_level { $1 }
mult_expr_level:
times_expr { EArith (Mult $1) }
| div_expr { EArith (Div $1) }
| mod_expr { EArith (Mod $1) }
| unary_expr_level { $1 }
times_expr:
bin_op(mult_expr_level, TIMES, unary_expr_level) { $1 }
div_expr:
bin_op(mult_expr_level, SLASH, unary_expr_level) { $1 }
mod_expr:
bin_op(mult_expr_level, Mod, unary_expr_level) { $1 }
bin_op(mult_expr_level, "*", unary_expr_level) { EArith (Mult $1) }
| bin_op(mult_expr_level, "/", unary_expr_level) { EArith (Div $1) }
| bin_op(mult_expr_level, "mod", unary_expr_level) { EArith (Mod $1) }
| unary_expr_level { $1 }
unary_expr_level:
MINUS call_expr_level {
call_expr_level { $1 }
| "-" call_expr_level {
let start = $1 in
let stop = expr_to_region $2 in
let region = cover start stop
and value = {op = $1; arg = $2}
in EArith (Neg {region; value}) }
| Not call_expr_level {
and value = {op=$1; arg=$2}
in EArith (Neg {region; value})
}
| "not" call_expr_level {
let start = $1 in
let stop = expr_to_region $2 in
let region = cover start stop
and value = {op = $1; arg = $2} in
and value = {op=$1; arg=$2} in
ELogic (BoolExpr (Not ({region; value}))) }
| call_expr_level {
$1 }
call_expr_level:
call_expr { ECall $1 }
| constr_expr { EConstr $1 }
| core_expr { $1 }
call_expr | constr_expr | core_expr { $1 }
constr_expr:
C_None {
ENone $1
"None" {
EConstr (ENone $1)
}
| C_Some core_expr {
| "Some" core_expr {
let region = cover $1 (expr_to_region $2)
in ESomeApp {value = $1,$2; region}
in EConstr (ESomeApp {region; value=$1,$2})
}
| Constr core_expr? {
let start = $1.region in
let stop =
match $2 with
Some c -> expr_to_region c
| None -> start in
let region = cover start stop
in EConstrApp {value=$1,$2; region} }
| "<constr>" core_expr {
let region = cover $1.region (expr_to_region $2) in
EConstr (EConstrApp {region; value=$1, Some $2})
}
| "<constr>" {
EConstr (EConstrApp {$1 with value=$1, None}) }
call_expr:
core_expr nseq(core_expr) {
@ -682,92 +560,77 @@ call_expr:
e, [] -> expr_to_region e
| _, l -> last expr_to_region l in
let region = cover start stop in
{value = $1,$2; region} }
ECall {region; value=$1,$2} }
core_expr:
Int { EArith (Int $1) }
| Mutez { EArith (Mutez $1) }
| Nat { EArith (Nat $1) }
| Ident | module_field { EVar $1 }
"<int>" { EArith (Int $1) }
| "<mutez>" { EArith (Mutez $1) }
| "<nat>" { EArith (Nat $1) }
| "<ident>" | module_field { EVar $1 }
| projection { EProj $1 }
| String { EString (String $1) }
| "<string>" { EString (String $1) }
| unit { EUnit $1 }
| False { ELogic (BoolExpr (False $1)) }
| True { ELogic (BoolExpr (True $1)) }
| "false" { ELogic (BoolExpr (False $1)) }
| "true" { ELogic (BoolExpr (True $1)) }
| list(expr) { EList (EListComp $1) }
| par(expr) { EPar $1 }
| sequence { ESeq $1 }
| record_expr { ERecord $1 }
| par(expr COLON type_expr {$1,$3}) {
EAnnot {$1 with value=$1.value.inside} }
| par(expr) { EPar $1 }
| par(expr ":" type_expr {$1,$2,$3}) { EAnnot $1 }
module_field:
module_name DOT field_name {
module_name "." field_name {
let region = cover $1.region $3.region in
{value = $1.value ^ "." ^ $3.value; region} }
{region; value = $1.value ^ "." ^ $3.value} }
projection:
struct_name DOT nsepseq(selection,DOT) {
struct_name "." nsepseq(selection,".") {
let start = $1.region in
let stop = nsepseq_to_region selection_to_region $3 in
let region = cover start stop in
let value = {
struct_name = $1;
selector = $2;
field_path = $3}
in {value; region}
let value = {struct_name=$1; selector=$2; field_path=$3}
in {region; value}
}
| module_name DOT field_name DOT nsepseq(selection,DOT) {
let value = $1.value ^ "." ^ $3.value in
| module_name "." field_name "." nsepseq(selection,".") {
let value = $1.value ^ "." ^ $3.value in
let struct_name = {$1 with value} in
let start = $1.region in
let stop = nsepseq_to_region selection_to_region $5 in
let region = cover start stop in
let value = {
struct_name;
selector = $4;
field_path = $5}
in {value; region} }
let start = $1.region in
let stop = nsepseq_to_region selection_to_region $5 in
let region = cover start stop in
let value = {struct_name; selector=$4; field_path=$5}
in {region; value} }
selection:
field_name { FieldName $1 }
| Int { Component $1 }
| "<int>" { Component $1 }
record_expr:
LBRACE sep_or_term_list(field_assignment,SEMI) RBRACE {
"{" sep_or_term_list(field_assignment,";") "}" {
let ne_elements, terminator = $2 in
let region = cover $1 $3 in
let value = {
compound = Braces ($1,$3);
ne_elements;
terminator}
in {value; region} }
let value = {compound = Braces ($1,$3);
ne_elements;
terminator}
in {region; value} }
field_assignment:
field_name EQ expr {
field_name "=" expr {
let start = $1.region in
let stop = expr_to_region $3 in
let region = cover start stop in
let value = {
field_name = $1;
assignment = $2;
field_expr = $3}
in {value; region} }
let value = {field_name = $1;
assignment = $2;
field_expr = $3}
in {region; value} }
sequence:
Begin sep_or_term_list(expr,SEMI) End {
let ne_elements, terminator = $2 in
let value = {
compound = BeginEnd ($1,$3);
elements = Some ne_elements;
terminator} in
let region = cover $1 $3
in {value; region}
}
| Begin End {
let value = {
compound = BeginEnd ($1,$2);
elements = None;
terminator = None} in
let region = cover $1 $2
in {value; region} }
"begin" sep_or_term_list(expr,";")? "end" {
let region = cover $1 $3
and compound = BeginEnd ($1,$3) in
let elements, terminator =
match $2 with
None -> None, None
| Some (ne_elements, terminator) ->
Some ne_elements, terminator in
let value = {compound; elements; terminator}
in {region; value} }

View File

@ -369,10 +369,13 @@ and print_fun_call state {value=f,l; _} =
print_expr state f;
Utils.nseq_iter (print_expr state) l
and print_annot_expr state {value=e,t; _} =
print_expr state e;
print_token state Region.ghost ":";
print_type_expr state t
and print_annot_expr state {value; _} =
let {lpar; inside=e,colon,t; rpar} = value in
print_token state lpar "(";
print_expr state e;
print_token state colon ":";
print_type_expr state t;
print_token state rpar ")"
and print_list_expr state = function
ECons {value={arg1;op;arg2}; _} ->
@ -738,7 +741,7 @@ and pp_expr state = function
pp_loc_node state "ECond" region;
pp_cond_expr state value
| EAnnot {value; region} ->
pp_loc_node state "EAnnot" region;
pp_loc_node state "EAnnot" region;
pp_annotated state value
| ELogic e_logic ->
pp_node state "ELogic";
@ -967,7 +970,8 @@ and pp_bin_op node region state op =
pp_expr (state#pad 2 0) op.arg1;
pp_expr (state#pad 2 1) op.arg2
and pp_annotated state (expr, t_expr) =
and pp_annotated state annot =
let expr, _, t_expr = annot.inside in
pp_expr (state#pad 2 0) expr;
pp_type_expr (state#pad 2 1) t_expr

View File

@ -35,7 +35,7 @@ type t =
| Bytes of (lexeme * Hex.t) Region.reg
| Int of (lexeme * Z.t) Region.reg
| Nat of (lexeme * Z.t) Region.reg
| Mutez of (lexeme * Z.t) Region.reg
| Mutez of (lexeme * Z.t) Region.reg
| Ident of lexeme Region.reg
| Constr of lexeme Region.reg

View File

@ -5,86 +5,86 @@
(* Literals *)
%token <LexToken.lexeme Region.reg> String
%token <(LexToken.lexeme * Hex.t) Region.reg> Bytes
%token <(LexToken.lexeme * Z.t) Region.reg> Int
%token <(LexToken.lexeme * Z.t) Region.reg> Nat
%token <(LexToken.lexeme * Z.t) Region.reg> Mutez
%token <LexToken.lexeme Region.reg> Ident
%token <LexToken.lexeme Region.reg> Constr
%token <LexToken.lexeme Region.reg> String "<string>"
%token <(LexToken.lexeme * Hex.t) Region.reg> Bytes "<bytes>"
%token <(LexToken.lexeme * Z.t) Region.reg> Int "<int>"
%token <(LexToken.lexeme * Z.t) Region.reg> Nat "<nat>"
%token <(LexToken.lexeme * Z.t) Region.reg> Mutez "<mutez>"
%token <LexToken.lexeme Region.reg> Ident "<ident>"
%token <LexToken.lexeme Region.reg> Constr "<constr>"
(* Symbols *)
%token <Region.t> SEMI (* ";" *)
%token <Region.t> COMMA (* "," *)
%token <Region.t> LPAR (* "(" *)
%token <Region.t> RPAR (* ")" *)
%token <Region.t> LBRACE (* "{" *)
%token <Region.t> RBRACE (* "}" *)
%token <Region.t> LBRACKET (* "[" *)
%token <Region.t> RBRACKET (* "]" *)
%token <Region.t> CONS (* "#" *)
%token <Region.t> VBAR (* "|" *)
%token <Region.t> ARROW (* "->" *)
%token <Region.t> ASS (* ":=" *)
%token <Region.t> EQ (* "=" *)
%token <Region.t> COLON (* ":" *)
%token <Region.t> LT (* "<" *)
%token <Region.t> LE (* "<=" *)
%token <Region.t> GT (* ">" *)
%token <Region.t> GE (* ">=" *)
%token <Region.t> NE (* "=/=" *)
%token <Region.t> PLUS (* "+" *)
%token <Region.t> MINUS (* "-" *)
%token <Region.t> SLASH (* "/" *)
%token <Region.t> TIMES (* "*" *)
%token <Region.t> DOT (* "." *)
%token <Region.t> WILD (* "_" *)
%token <Region.t> CAT (* "^" *)
%token <Region.t> SEMI ";"
%token <Region.t> COMMA ","
%token <Region.t> LPAR "("
%token <Region.t> RPAR ")"
%token <Region.t> LBRACE "{"
%token <Region.t> RBRACE "}"
%token <Region.t> LBRACKET "["
%token <Region.t> RBRACKET "]"
%token <Region.t> CONS "#"
%token <Region.t> VBAR "|"
%token <Region.t> ARROW "->"
%token <Region.t> ASS ":="
%token <Region.t> EQ "="
%token <Region.t> COLON ":"
%token <Region.t> LT "<"
%token <Region.t> LE "<="
%token <Region.t> GT ">"
%token <Region.t> GE ">="
%token <Region.t> NE "=/="
%token <Region.t> PLUS "+"
%token <Region.t> MINUS "-"
%token <Region.t> SLASH "/"
%token <Region.t> TIMES "*"
%token <Region.t> DOT "."
%token <Region.t> WILD "_"
%token <Region.t> CAT "^"
(* Keywords *)
%token <Region.t> And (* "and" *)
%token <Region.t> Begin (* "begin" *)
%token <Region.t> BigMap (* "big_map" *)
%token <Region.t> Block (* "block" *)
%token <Region.t> Case (* "case" *)
%token <Region.t> Const (* "const" *)
%token <Region.t> Contains (* "contains" *)
%token <Region.t> Else (* "else" *)
%token <Region.t> End (* "end" *)
%token <Region.t> False (* "False" *)
%token <Region.t> For (* "for" *)
%token <Region.t> Function (* "function" *)
%token <Region.t> From (* "from" *)
%token <Region.t> If (* "if" *)
%token <Region.t> In (* "in" *)
%token <Region.t> Is (* "is" *)
%token <Region.t> List (* "list" *)
%token <Region.t> Map (* "map" *)
%token <Region.t> Mod (* "mod" *)
%token <Region.t> Nil (* "nil" *)
%token <Region.t> Not (* "not" *)
%token <Region.t> Of (* "of" *)
%token <Region.t> Or (* "or" *)
%token <Region.t> Patch (* "patch" *)
%token <Region.t> Record (* "record" *)
%token <Region.t> Remove (* "remove" *)
%token <Region.t> Set (* "set" *)
%token <Region.t> Skip (* "skip" *)
%token <Region.t> Then (* "then" *)
%token <Region.t> To (* "to" *)
%token <Region.t> True (* "True" *)
%token <Region.t> Type (* "type" *)
%token <Region.t> Unit (* "Unit" *)
%token <Region.t> Var (* "var" *)
%token <Region.t> While (* "while" *)
%token <Region.t> With (* "with" *)
%token <Region.t> And "and"
%token <Region.t> Begin "begin"
%token <Region.t> BigMap "big_map"
%token <Region.t> Block "block"
%token <Region.t> Case "case"
%token <Region.t> Const "const"
%token <Region.t> Contains "contains"
%token <Region.t> Else "else"
%token <Region.t> End "end"
%token <Region.t> False "False"
%token <Region.t> For "for"
%token <Region.t> Function "function"
%token <Region.t> From "from"
%token <Region.t> If "if"
%token <Region.t> In "in"
%token <Region.t> Is "is"
%token <Region.t> List "list"
%token <Region.t> Map "map"
%token <Region.t> Mod "mod"
%token <Region.t> Nil "nil"
%token <Region.t> Not "not"
%token <Region.t> Of "of"
%token <Region.t> Or "or"
%token <Region.t> Patch "patch"
%token <Region.t> Record "record"
%token <Region.t> Remove "remove"
%token <Region.t> Set "set"
%token <Region.t> Skip "skip"
%token <Region.t> Then "then"
%token <Region.t> To "to"
%token <Region.t> True "True"
%token <Region.t> Type "type"
%token <Region.t> Unit "Unit"
%token <Region.t> Var "var"
%token <Region.t> While "while"
%token <Region.t> With "with"
(* Data constructors *)
%token <Region.t> C_None (* "None" *)
%token <Region.t> C_Some (* "Some" *)
%token <Region.t> C_None "None"
%token <Region.t> C_Some "Some"
(* Virtual tokens *)

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,2 @@
module Region = Region
module Pos = Pos

View File

@ -0,0 +1 @@
--explain --external-tokens LexToken --base Parser ParToken.mly

View File

@ -0,0 +1,25 @@
$HOME/git/OCaml-build/Makefile
$HOME/git/OCaml-build/Makefile.cfg
$HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.ml
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Lexer.mli
../shared/Lexer.mll
../shared/Error.mli
../shared/EvalOpt.ml
../shared/EvalOpt.mli
../shared/FQueue.ml
../shared/FQueue.mli
../shared/LexerLog.mli
../shared/LexerLog.ml
../shared/Markup.ml
../shared/Markup.mli
../shared/Utils.mli
../shared/Utils.ml
Stubs/Simple_utils.ml
Stubs/Parser_cameligo.ml
../cameligo/AST.mli
../cameligo/AST.ml
../cameligo/ParserLog.mli
../cameligo/ParserLog.ml

View File

@ -56,7 +56,7 @@ type t =
| VBAR of Region.t (* "|" *)
| COLON of Region.t (* ":" *)
| DOT of Region.t (* "." *)
| DOTDOTDOT of Region.t (* "..." *)
| ELLIPSIS of Region.t (* "..." *)
(* Wildcard *)
@ -65,18 +65,18 @@ type t =
(* Comparisons *)
| EQ of Region.t (* "=" *)
| EQEQ of Region.t (* "=" *)
| NE of Region.t (* "!=" *)
| LT of Region.t (* "<" *)
| GT of Region.t (* ">" *)
| EQEQ of Region.t (* "=" *)
| NE of Region.t (* "!=" *)
| LT of Region.t (* "<" *)
| GT of Region.t (* ">" *)
| LE of Region.t (* "=<" *)
| GE of Region.t (* ">=" *)
| ARROW of Region.t (* "=>" *)
| GE of Region.t (* ">=" *)
| BOOL_OR of Region.t (* "||" *)
| BOOL_AND of Region.t(* "&&" *)
| ARROW of Region.t (* "=>" *)
| NOT of Region.t (* ! *)
| BOOL_OR of Region.t (* "||" *)
| BOOL_AND of Region.t (* "&&" *)
| NOT of Region.t (* ! *)
(* Identifiers, labels, numbers and strings *)
@ -84,8 +84,8 @@ type t =
| Constr of string Region.reg
| Int of (string * Z.t) Region.reg
| Nat of (string * Z.t) Region.reg
| Mtz of (string * Z.t) Region.reg
| Str of string Region.reg
| Mutez of (string * Z.t) Region.reg
| String of string Region.reg
| Bytes of (string * Hex.t) Region.reg
(* Keywords *)

View File

@ -38,7 +38,7 @@ type t =
| VBAR of Region.t (* "|" *)
| COLON of Region.t (* ":" *)
| DOT of Region.t (* "." *)
| DOTDOTDOT of Region.t (* "..." *)
| ELLIPSIS of Region.t (* "..." *)
(* Wildcard *)
@ -66,8 +66,8 @@ type t =
| Constr of string Region.reg
| Int of (string * Z.t) Region.reg
| Nat of (string * Z.t) Region.reg
| Mtz of (string * Z.t) Region.reg
| Str of string Region.reg
| Mutez of (string * Z.t) Region.reg
| String of string Region.reg
| Bytes of (string * Hex.t) Region.reg
(* Keywords *)
@ -110,7 +110,7 @@ let proj_token = function
| VBAR region -> region, "VBAR"
| COLON region -> region, "COLON"
| DOT region -> region, "DOT"
| DOTDOTDOT region -> region, "DOTDOTDOT"
| ELLIPSIS region -> region, "ELLIPSIS"
| WILD region -> region, "WILD"
| EQ region -> region, "EQ"
| EQEQ region -> region, "EQEQ"
@ -130,10 +130,10 @@ let proj_token = function
region, sprintf "Int (\"%s\", %s)" s (Z.to_string n)
| Nat Region.{region; value = s,n} ->
region, sprintf "Nat (\"%s\", %s)" s (Z.to_string n)
| Mtz Region.{region; value = s,n} ->
| Mutez Region.{region; value = s,n} ->
region, sprintf "Mutez (\"%s\", %s)" s (Z.to_string n)
| Str Region.{region; value} ->
region, sprintf "Str %s" value
| String Region.{region; value} ->
region, sprintf "String %s" value
| Bytes Region.{region; value = s,b} ->
region,
sprintf "Bytes (\"%s\", \"0x%s\")"
@ -169,7 +169,7 @@ let to_lexeme = function
| VBAR _ -> "|"
| COLON _ -> ":"
| DOT _ -> "."
| DOTDOTDOT _ -> "..."
| ELLIPSIS _ -> "..."
| WILD _ -> "_"
| EQ _ -> "="
| EQEQ _ -> "=="
@ -183,10 +183,10 @@ let to_lexeme = function
| BOOL_AND _ -> "&&"
| Ident id -> id.Region.value
| Constr id -> id.Region.value
| Int i
| Nat i
| Mtz i -> fst i.Region.value
| Str s -> s.Region.value
| Int i
| Nat i
| Mutez i -> fst i.Region.value
| String s -> s.Region.value
| Bytes b -> fst b.Region.value
| Else _ -> "else"
| False _ -> "false"
@ -200,7 +200,7 @@ let to_lexeme = function
| Type _ -> "type"
| C_None _ -> "None"
| C_Some _ -> "Some"
| EOF _ -> ""
| EOF _ -> ""
let to_string token ?(offsets=true) mode =
let region, val_str = proj_token token in
@ -231,11 +231,11 @@ let keywords = [
(fun reg -> Type reg);
]
(* See: http://caml.inria.fr/pub/docs/manual-ocaml/lex.html#sec86 and
(* See: http://caml.inria.fr/pub/docs/manual-ocaml/lex.html#sec86 and
https://github.com/facebook/reason/blob/master/src/reason-parser/reason_parser.mly *)
let reserved =
let open SSet in
empty
empty
|> add "and"
|> add "as"
|> add "asr"
@ -257,9 +257,9 @@ let reserved =
|> add "lazy"
(* |> add "lor" - see https://ligo.atlassian.net/browse/LIGO-263 *)
|> add "lsl"
|> add "lsr"
|> add "lsr"
(* |> add "lxor" - see https://ligo.atlassian.net/browse/LIGO-263 *)
|> add "match"
|> add "match"
|> add "method"
|> add "module"
|> add "mutable"
@ -284,7 +284,7 @@ let reserved =
let constructors = [
(fun reg -> C_None reg);
(fun reg -> C_Some reg);
(fun reg -> C_Some reg);
]
let add map (key, value) = SMap.add key value map
@ -346,7 +346,7 @@ let line_comment_start lexeme = lexeme = "//"
(* Smart constructors (injections) *)
let mk_string lexeme region = Str Region.{region; value=lexeme}
let mk_string lexeme region = String Region.{region; value=lexeme}
let mk_bytes lexeme region =
let norm = Str.(global_replace (regexp "_") "" lexeme) in
@ -376,12 +376,12 @@ let mk_mutez lexeme region =
Z.of_string in
if Z.equal z Z.zero && lexeme <> "0mutez"
then Error Non_canonical_zero
else Ok (Mtz Region.{region; value = lexeme, z})
else Ok (Mutez Region.{region; value = lexeme, z})
let eof region = EOF region
let mk_sym lexeme region =
match lexeme with
match lexeme with
"-" -> Ok (MINUS region)
| "+" -> Ok (PLUS region)
| "/" -> Ok (SLASH region)
@ -394,9 +394,9 @@ let mk_sym lexeme region =
| ";" -> Ok (SEMI region)
| "|" -> Ok (VBAR region)
| ":" -> Ok (COLON region)
| "." -> Ok (DOT region)
| "." -> Ok (DOT region)
| "_" -> Ok (WILD region)
| "=" -> Ok (EQ region)
| "=" -> Ok (EQ region)
| "!=" -> Ok (NE region)
| "<" -> Ok (LT region)
| ">" -> Ok (GT region)
@ -406,10 +406,10 @@ let mk_sym lexeme region =
| "&&" -> Ok (BOOL_AND region)
| "(" -> Ok (LPAR region)
| ")" -> Ok (RPAR region)
(* Symbols specific to ReasonLIGO *)
| "..."-> Ok (DOTDOTDOT region)
| "=>" -> Ok (ARROW region)
| "..."-> Ok (ELLIPSIS region)
| "=>" -> Ok (ARROW region)
| "==" -> Ok (EQEQ region)
| "!" -> Ok (NOT region)
| "++" -> Ok (CAT region)
@ -432,7 +432,7 @@ let mk_constr lexeme region = mk_constr' lexeme region lexicon
(* Predicates *)
let is_string = function
Str _ -> true
String _ -> true
| _ -> false
let is_bytes = function
@ -483,7 +483,7 @@ let is_sym = function
| VBAR _
| COLON _
| DOT _
| DOTDOTDOT _
| ELLIPSIS _
| WILD _
| EQ _
| EQEQ _
@ -501,4 +501,4 @@ let is_sym = function
let is_eof = function EOF _ -> true | _ -> false
(* END TRAILER *)
}
}

View File

@ -1,76 +1,75 @@
%{
%}
(* Tokens (mirroring thise defined in module LexToken) *)
(* Tokens (mirroring those defined in module LexToken) *)
(* Literals *)
%token <string Region.reg> Ident
%token <string Region.reg> Constr
%token <string Region.reg> Str
%token <(string * Z.t) Region.reg> Int
%token <(string * Z.t) Region.reg> Nat
%token <(string * Z.t) Region.reg> Mtz
%token <string Region.reg> Ident "<ident>"
%token <string Region.reg> Constr "<constr>"
%token <string Region.reg> String "<string>"
%token <(string * Z.t) Region.reg> Int "<int>"
%token <(string * Z.t) Region.reg> Nat "<nat>"
%token <(string * Z.t) Region.reg> Mutez "<mutez>"
(* Symbols *)
%token <Region.t> MINUS
%token <Region.t> PLUS
%token <Region.t> SLASH
%token <Region.t> TIMES
%token <Region.t> MINUS "-"
%token <Region.t> PLUS "+"
%token <Region.t> SLASH "/"
%token <Region.t> TIMES "*"
%token <Region.t> LPAR
%token <Region.t> RPAR
%token <Region.t> LBRACKET
%token <Region.t> RBRACKET
%token <Region.t> LBRACE
%token <Region.t> RBRACE
%token <Region.t> LPAR "("
%token <Region.t> RPAR ")"
%token <Region.t> LBRACKET "["
%token <Region.t> RBRACKET "]"
%token <Region.t> LBRACE "{"
%token <Region.t> RBRACE "}"
%token <Region.t> CAT
%token <Region.t> DOT
%token <Region.t> DOTDOTDOT
%token <Region.t> CAT "++"
%token <Region.t> DOT "."
%token <Region.t> ELLIPSIS "..."
%token <Region.t> COMMA
%token <Region.t> SEMI
%token <Region.t> COLON
%token <Region.t> VBAR
%token <Region.t> COMMA ","
%token <Region.t> SEMI ";"
%token <Region.t> COLON ":"
%token <Region.t> VBAR "|"
%token <Region.t> WILD
%token <Region.t> WILD "_"
%token <Region.t> EQ
%token <Region.t> EQEQ
%token <Region.t> NE
%token <Region.t> LT
%token <Region.t> GT
%token <Region.t> LE
%token <Region.t> GE
%token <Region.t> ARROW
%token <Region.t> EQ "="
%token <Region.t> EQEQ "=="
%token <Region.t> NE "!="
%token <Region.t> LT "<"
%token <Region.t> GT ">"
%token <Region.t> LE "<="
%token <Region.t> GE ">="
%token <Region.t> ARROW "=>"
%token <Region.t> NOT
%token <Region.t> BOOL_OR
%token <Region.t> BOOL_AND
%token <Region.t> NOT "!"
%token <Region.t> BOOL_OR "||"
%token <Region.t> BOOL_AND "&&"
(* Keywords *)
%token <Region.t> Else
%token <Region.t> False
%token <Region.t> If
%token <Region.t> Let
%token <Region.t> Switch
%token <Region.t> Mod
%token <Region.t> Or
%token <Region.t> True
%token <Region.t> Type
%token <Region.t> Else "else"
%token <Region.t> False "false"
%token <Region.t> If "if"
%token <Region.t> Let "let"
%token <Region.t> Switch "switch"
%token <Region.t> Mod "mod"
%token <Region.t> Or "or"
%token <Region.t> True "true"
%token <Region.t> Type "type"
(* Data constructors *)
%token <Region.t> C_None (* "None" *)
%token <Region.t> C_Some (* "Some" *)
%token <Region.t> C_None "None"
%token <Region.t> C_Some "Some"
(* Virtual tokens *)
%token <Region.t> EOF
%%

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1 @@
module AST = AST

View File

@ -0,0 +1,2 @@
module Region = Region
module Pos = Pos

View File

@ -155,11 +155,10 @@ let rec pattern_to_typed_var : Raw.pattern -> _ = fun p ->
| Raw.PWild r -> ok (({ region = r ; value = "_" } : Raw.variable) , None)
| _ -> fail @@ wrong_pattern "typed variable" p
let rec expr_to_typed_expr : Raw.expr -> _ = fun e ->
match e with
| EPar e -> expr_to_typed_expr e.value.inside
| EAnnot a -> ok (fst a.value , Some (snd a.value))
| _ -> ok (e , None)
let rec expr_to_typed_expr : Raw.expr -> _ = function
EPar e -> expr_to_typed_expr e.value.inside
| EAnnot {value={inside=e,_,t; _}; _} -> ok (e, Some t)
| e -> ok (e , None)
let patterns_to_var : Raw.pattern nseq -> _ = fun ps ->
match ps with
@ -266,7 +265,7 @@ let rec simpl_expression :
let%bind body = simpl_expression body in
return @@ e_let_in (Var.of_name variable.value , None) rhs' body
| Raw.EAnnot a ->
let (expr , type_expr), loc = r_split a in
let Raw.{inside=expr, _, type_expr; _}, loc = r_split a in
let%bind expr' = simpl_expression expr in
let%bind type_expr' = simpl_type_expression type_expr in
return @@ e_annotation ~loc expr' type_expr'