Merge branch 'rinderknecht-dev' into 'dev'

Fixing and adding errors in the parser

See merge request ligolang/ligo!316

commit d49a72847a
@@ -63,7 +63,7 @@ let moves: moveset = Map.literal
 <!--ReasonLIGO-->

 ```reasonligo
-let moves: moveset =
+let moves : moveset =
   Map.literal([
     ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address, (1, 2)),
     ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, (0, 3)),
@@ -82,19 +82,19 @@ If we want to access a move from our moveset above, we can use the `[]` operator
 <!--DOCUSAURUS_CODE_TABS-->
 <!--Pascaligo-->
 ```pascaligo
-const balance: option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
+const my_balance : option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
 ```

 <!--CameLIGO-->

 ```cameligo
-let balance: move option = Map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
+let my_balance : move option = Map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
 ```

 <!--ReasonLIGO-->

 ```reasonligo
-let balance: option(move) =
+let my_balance : option(move) =
   Map.find_opt("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
 ```
 <!--END_DOCUSAURUS_CODE_TABS-->
@@ -106,19 +106,19 @@ Accessing a value in a map yields an option, however you can also get the value
 <!--DOCUSAURUS_CODE_TABS-->
 <!--Pascaligo-->
 ```pascaligo
-const balance: move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves);
+const my_balance : move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves);
 ```

 <!--CameLIGO-->

 ```cameligo
-let balance: move = Map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
+let my_balance : move = Map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
 ```

 <!--ReasonLIGO-->

 ```reasonligo
-let balance: move =
+let my_balance : move =
   Map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
 ```

@@ -341,19 +341,19 @@ If we want to access a move from our moveset above, we can use the `[]` operator
 <!--DOCUSAURUS_CODE_TABS-->
 <!--Pascaligo-->
 ```pascaligo
-const balance: option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
+const my_balance : option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
 ```

 <!--CameLIGO-->

 ```cameligo
-let balance: move option = Big_map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
+let my_balance : move option = Big_map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
 ```

 <!--ReasonLIGO-->

 ```reasonligo
-let balance: option(move) =
+let my_balance : option(move) =
   Big_map.find_opt("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
 ```
 <!--END_DOCUSAURUS_CODE_TABS-->
@@ -365,19 +365,19 @@ Accessing a value in a map yields an option, however you can also get the value
 <!--DOCUSAURUS_CODE_TABS-->
 <!--Pascaligo-->
 ```pascaligo
-const balance: move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves);
+const my_balance : move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves);
 ```

 <!--CameLIGO-->

 ```cameligo
-let balance: move = Big_map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
+let my_balance : move = Big_map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
 ```

 <!--ReasonLIGO-->

 ```reasonligo
-let balance: move = Big_map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
+let my_balance : move = Big_map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
 ```

 <!--END_DOCUSAURUS_CODE_TABS-->
@@ -392,7 +392,7 @@ The values of a PascaLIGO big map can be updated using the ordinary assignment s

 ```pascaligo

-function set_ (var m: moveset) : moveset is
+function set_ (var m : moveset) : moveset is
 block {
   m[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)] := (4,9);
 } with m
@@ -404,7 +404,7 @@ We can update a big map in CameLIGO using the `Big_map.update` built-in:

 ```cameligo

-let updated_map: moveset =
+let updated_map : moveset =
   Big_map.update ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) (Some (4,9)) moves
 ```

@@ -494,12 +494,12 @@ If we want to obtain a value from a record for a given key, we can do the follow
 <!--DOCUSAURUS_CODE_TABS-->
 <!--Pascaligo-->
 ```pascaligo
-const is_admin: bool = user.is_admin;
+const is_admin : bool = user.is_admin;
 ```

 <!--CameLIGO-->
 ```cameligo
-let is_admin: bool = user.is_admin
+let is_admin : bool = user.is_admin
 ```

 <!--ReasonLIGO-->
@@ -137,12 +137,14 @@ type ident_err = Reserved_name
 type nat_err = Invalid_natural
 | Non_canonical_zero_nat
 type sym_err = Invalid_symbol
+type kwd_err = Invalid_keyword

 val mk_int : lexeme -> Region.t -> (token, int_err) result
 val mk_nat : lexeme -> Region.t -> (token, nat_err) result
 val mk_mutez : lexeme -> Region.t -> (token, int_err) result
 val mk_ident : lexeme -> Region.t -> (token, ident_err) result
 val mk_sym : lexeme -> Region.t -> (token, sym_err) result
+val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result
 val mk_string : lexeme -> Region.t -> token
 val mk_bytes : lexeme -> Region.t -> token
 val mk_constr : lexeme -> Region.t -> token
@@ -1,4 +1,6 @@
 {
+(* START HEADER *)
+
 type lexeme = string

 let sprintf = Printf.sprintf
@@ -236,8 +238,7 @@ let to_region token = proj_token token |> fst

 (* Injections *)

-type int_err =
-  Non_canonical_zero
+type int_err = Non_canonical_zero

 (* LEXIS *)

@@ -258,8 +259,7 @@ let keywords = [
 (fun reg -> Then reg);
 (fun reg -> True reg);
 (fun reg -> Type reg);
-(fun reg -> With reg)
-]
+(fun reg -> With reg)]

 let reserved =
 let open SSet in
@@ -323,8 +323,20 @@ let lexicon : lexis =
 cstr = build constructors;
 res = reserved}

+(* Keywords *)
+
+type kwd_err = Invalid_keyword
+
+let mk_kwd ident region =
+  match SMap.find_opt ident lexicon.kwd with
+    Some mk_kwd -> Ok (mk_kwd region)
+  | None -> Error Invalid_keyword
+
+(* Identifiers *)
+
 type ident_err = Reserved_name

+(* END OF HEADER *)
 }

 (* START LEXER DEFINITION *)
@@ -19,9 +19,9 @@ module ExtParserLog =
 include ParserLog
 end

-module M = ParserUnit.Make (IO)
-  (Lexer.Make (LexToken))
-  (AST)
-  (ExtParser)
-  (ParErr)
-  (ExtParserLog)
+module MyLexer = Lexer.Make (LexToken)
+
+module Unit =
+  ParserUnit.Make (IO)(MyLexer)(AST)(ExtParser)(ParErr)(ExtParserLog)
+
+let () = Unit.run ()
@@ -5,6 +5,7 @@ module AST = Parser_pascaligo.AST
 module ParserLog = Parser_pascaligo.ParserLog
 module LexToken = Parser_pascaligo.LexToken
 module Lexer = Lexer.Make(LexToken)
+module SyntaxError = Parser_pascaligo.SyntaxError

 module Errors = struct

@@ -18,6 +19,34 @@ module Errors = struct
 ] in
 error ~data title message

+let reserved_name Region.{value; region} =
+  let title () = Printf.sprintf "reserved name \"%s\"" value in
+  let message () = "" in
+  let data = [
+    ("location",
+     fun () -> Format.asprintf "%a" Location.pp_lift @@ region)
+  ] in
+  error ~data title message
+
+let duplicate_parameter Region.{value; region} =
+  let title () = Printf.sprintf "duplicate parameter \"%s\"" value in
+  let message () = "" in
+  let data = [
+    ("location",
+     fun () -> Format.asprintf "%a" Location.pp_lift @@ region)
+  ] in
+  error ~data title message
+
+let duplicate_variant Region.{value; region} =
+  let title () = Printf.sprintf "duplicate variant \"%s\" in this\
+                  type declaration" value in
+  let message () = "" in
+  let data = [
+    ("location",
+     fun () -> Format.asprintf "%a" Location.pp_lift @@ region)
+  ] in
+  error ~data title message
+
 let parser_error source (start: Lexing.position) (end_: Lexing.position) lexbuf =
 let title () = "parser error" in
 let file = if source = "" then
@@ -82,6 +111,12 @@ let parse (parser: 'a parser) source lexbuf =
 try
 ok (parser read lexbuf)
 with
+  SyntaxError.Error (Duplicate_parameter name) ->
+    fail @@ (duplicate_parameter name)
+| SyntaxError.Error (Duplicate_variant name) ->
+    fail @@ (duplicate_variant name)
+| SyntaxError.Error (Reserved_name name) ->
+    fail @@ (reserved_name name)
 | Parser.Error ->
 let start = Lexing.lexeme_start_p lexbuf in
 let end_ = Lexing.lexeme_end_p lexbuf in
@@ -760,4 +760,49 @@ let rhs_to_region = expr_to_region

 let selection_to_region = function
 FieldName {region; _}
 | Component {region; _} -> region
+
+(* Extracting variables from patterns *)
+
+module Ord =
+  struct
+    type t = string Region.reg
+    let compare v1 v2 =
+      compare v1.value v2.value
+  end
+
+module VSet = Set.Make (Ord)
+
+let rec vars_of_pattern env = function
+  PConstr p -> vars_of_pconstr env p
+| PVar v -> VSet.add v env
+| PWild _ | PInt _ | PNat _ | PBytes _ | PString _ -> env
+| PList l -> vars_of_plist env l
+| PTuple t -> vars_of_ptuple env t.value
+
+and vars_of_pconstr env = function
+  PUnit _ | PFalse _ | PTrue _ | PNone _ -> env
+| PSomeApp {value=_, {value={inside; _};_}; _} ->
+    vars_of_pattern env inside
+| PConstrApp {value=_, Some tuple; _} ->
+    vars_of_ptuple env tuple.value
+| PConstrApp {value=_,None; _} -> env
+
+and vars_of_plist env = function
+  PListComp {value; _} ->
+    vars_of_pinj env value
+| PNil _ ->
+    env
+| PParCons {value={inside; _}; _} ->
+    let head, _, tail = inside in
+    vars_of_pattern (vars_of_pattern env head) tail
+| PCons {value; _} ->
+    Utils.nsepseq_foldl vars_of_pattern env value
+
+and vars_of_pinj env inj =
+  Utils.sepseq_foldl vars_of_pattern env inj.elements
+
+and vars_of_ptuple env {inside; _} =
+  Utils.nsepseq_foldl vars_of_pattern env inside
+
+let vars_of_pattern = vars_of_pattern VSet.empty
@@ -615,3 +615,9 @@ val lhs_to_region : lhs -> Region.t
 val rhs_to_region : rhs -> Region.t
 val if_clause_to_region : if_clause -> Region.t
 val selection_to_region : selection -> Region.t
+
+(* Extracting variables from patterns *)
+
+module VSet : Set.S with type elt = string Region.reg
+
+val vars_of_pattern : pattern -> VSet.t
@@ -138,12 +138,14 @@ type ident_err = Reserved_name
 type nat_err = Invalid_natural
 | Non_canonical_zero_nat
 type sym_err = Invalid_symbol
+type kwd_err = Invalid_keyword

 val mk_int : lexeme -> Region.t -> (token, int_err) result
 val mk_nat : lexeme -> Region.t -> (token, nat_err) result
 val mk_mutez : lexeme -> Region.t -> (token, int_err) result
 val mk_ident : lexeme -> Region.t -> (token, ident_err) result
 val mk_sym : lexeme -> Region.t -> (token, sym_err) result
+val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result
 val mk_string : lexeme -> Region.t -> token
 val mk_bytes : lexeme -> Region.t -> token
 val mk_constr : lexeme -> Region.t -> token
@@ -389,6 +389,15 @@ let lexicon : lexis =
 cstr = build constructors;
 res = reserved}

+(* Keywords *)
+
+type kwd_err = Invalid_keyword
+
+let mk_kwd ident region =
+  match SMap.find_opt ident lexicon.kwd with
+    Some mk_kwd -> Ok (mk_kwd region)
+  | None -> Error Invalid_keyword
+
 (* Identifiers *)

 type ident_err = Reserved_name
@@ -6,7 +6,60 @@
 open Region
 open AST

-(* END HEADER *)
+module SSet = Utils.String.Set
+
+let reserved =
+  let open SSet in
+  empty
+  |> add "get_force"
+  |> add "get_chain_id"
+  |> add "transaction"
+  |> add "get_contract"
+  |> add "get_entrypoint"
+  |> add "size"
+  |> add "int"
+  |> add "abs"
+  |> add "is_nat"
+  |> add "amount"
+  |> add "balance"
+  |> add "now"
+  |> add "unit"
+  |> add "source"
+  |> add "sender"
+  |> add "failwith"
+  |> add "bitwise_or"
+  |> add "bitwise_and"
+  |> add "bitwise_xor"
+  |> add "string_concat"
+  |> add "string_slice"
+  |> add "crypto_check"
+  |> add "crypto_hash_key"
+  |> add "bytes_concat"
+  |> add "bytes_slice"
+  |> add "bytes_pack"
+  |> add "bytes_unpack"
+  |> add "set_empty"
+  |> add "set_mem"
+  |> add "set_add"
+  |> add "set_remove"
+  |> add "set_iter"
+  |> add "set_fold"
+  |> add "list_iter"
+  |> add "list_fold"
+  |> add "list_map"
+  |> add "map_iter"
+  |> add "map_map"
+  |> add "map_fold"
+  |> add "map_remove"
+  |> add "map_update"
+  |> add "map_get"
+  |> add "map_mem"
+  |> add "sha_256"
+  |> add "sha_512"
+  |> add "blake2b"
+  |> add "cons"
+
+(* END HEADER *)
 %}

 (* See [ParToken.mly] for the definition of tokens. *)
@@ -118,6 +171,10 @@ declaration:

 type_decl:
 "type" type_name "is" type_expr ";"? {
+  let () =
+    if SSet.mem $2.value reserved then
+      let open! SyntaxError in
+      raise (Error (Reserved_name $2)) in
 let stop =
 match $5 with
 Some region -> region
@@ -185,6 +242,14 @@ type_tuple:

 sum_type:
 "|"? nsepseq(variant,"|") {
+  let add acc {value; _} =
+    if VSet.mem value.constr acc then
+      let open! SyntaxError in
+      raise (Error (Duplicate_variant value.constr))
+    else VSet.add value.constr acc in
+  let variants =
+    Utils.nsepseq_foldl add VSet.empty $2 in
+  let () = ignore variants in
 let region = nsepseq_to_region (fun x -> x.region) $2
 in TSum {region; value=$2} }

@@ -225,6 +290,13 @@ fun_expr:
 "function" fun_name? parameters ":" type_expr "is"
 block
 "with" expr {
+  let () =
+    match $2 with
+      Some name ->
+        if SSet.mem name.value reserved then
+          let open! SyntaxError in
+          raise (Error (Reserved_name name))
+    | None -> () in
 let stop = expr_to_region $9 in
 let region = cover $1 stop
 and value = {kwd_function = $1;
@@ -237,6 +309,13 @@ fun_expr:
 return = $9}
 in {region; value} }
 | "function" fun_name? parameters ":" type_expr "is" expr {
+  let () =
+    match $2 with
+      Some name ->
+        if SSet.mem name.value reserved then
+          let open! SyntaxError in
+          raise (Error (Reserved_name name))
+    | None -> () in
 let stop = expr_to_region $7 in
 let region = cover $1 stop
 and value = {kwd_function = $1;
@@ -256,7 +335,7 @@ fun_decl:
 open_fun_decl { $1 }
 | fun_expr ";" {
 let region = cover $1.region $2
-and value = {fun_expr=$1; terminator= Some $2}
+and value = {fun_expr=$1; terminator = Some $2}
 in {region; value} }

 open_fun_decl:
@@ -266,10 +345,31 @@ open_fun_decl:
 in {region; value} }

 parameters:
-par(nsepseq(param_decl,";")) { $1 }
+par(nsepseq(param_decl,";")) {
+  let open! AST in
+  let contents : (param_decl, semi) Utils.nsepseq par reg = $1 in
+  let add acc = function
+    ParamConst {value; _} ->
+      if VSet.mem value.var acc then
+        let open! SyntaxError in
+        raise (Error (Duplicate_parameter value.var))
+      else VSet.add value.var acc
+  | ParamVar {value; _} ->
+      if VSet.mem value.var acc then
+        let open! SyntaxError in
+        raise (Error (Duplicate_parameter value.var))
+      else VSet.add value.var acc in
+  let params =
+    Utils.nsepseq_foldl add VSet.empty contents.value.inside in
+  let () = ignore params
+  in $1 }

 param_decl:
 "var" var ":" param_type {
+  let () =
+    if SSet.mem $2.value reserved then
+      let open! SyntaxError in
+      raise (Error (Reserved_name $2)) in
 let stop = type_expr_to_region $4 in
 let region = cover $1 stop
 and value = {kwd_var = $1;
@@ -279,6 +379,10 @@ param_decl:
 in ParamVar {region; value}
 }
 | "const" var ":" param_type {
+  let () =
+    if SSet.mem $2.value reserved then
+      let open! SyntaxError in
+      raise (Error (Reserved_name $2)) in
 let stop = type_expr_to_region $4 in
 let region = cover $1 stop
 and value = {kwd_const = $1;
@@ -346,13 +450,16 @@ open_var_decl:

 unqualified_decl(OP):
 var ":" type_expr OP expr {
+  let () =
+    if SSet.mem $1.value reserved then
+      let open! SyntaxError in
+      raise (Error (Reserved_name $1)) in
 let region = expr_to_region $5
 in $1, $2, $3, $4, $5, region }

 const_decl:
-open_const_decl { $1 }
-| open_const_decl ";" {
-  {$1 with value = {$1.value with terminator = Some $2}} }
+open_const_decl ";"? {
+  {$1 with value = {$1.value with terminator=$2}} }

 instruction:
 conditional { Cond $1 }
@@ -555,6 +662,14 @@ cases(rhs):

 case_clause(rhs):
 pattern "->" rhs {
+  let vars = AST.vars_of_pattern $1 in
+  let is_reserved elt = SSet.mem elt.value reserved in
+  let inter = VSet.filter is_reserved vars in
+  let () =
+    if not (VSet.is_empty inter) then
+      let clash = VSet.choose inter in
+      let open! SyntaxError in
+      raise (Error (Reserved_name clash)) in
 fun rhs_to_region ->
 let start = pattern_to_region $1 in
 let region = cover start (rhs_to_region $3)
@@ -596,6 +711,10 @@ for_loop:
 in For (ForInt {region; value})
 }
 | "for" var arrow_clause? "in" collection expr block {
+  let () =
+    if SSet.mem $2.value reserved then
+      let open! SyntaxError in
+      raise (Error (Reserved_name $2)) in
 let region = cover $1 $7.region in
 let value = {kwd_for = $1;
 var = $2;
@@ -613,12 +732,21 @@ collection:

 var_assign:
 var ":=" expr {
+  let () =
+    if SSet.mem $1.value reserved then
+      let open! SyntaxError in
+      raise (Error (Reserved_name $1)) in
 let region = cover $1.region (expr_to_region $3)
 and value = {name=$1; assign=$2; expr=$3}
 in {region; value} }

 arrow_clause:
-"->" var { $1,$2 }
+"->" var {
+  let () =
+    if SSet.mem $2.value reserved then
+      let open! SyntaxError in
+      raise (Error (Reserved_name $2))
+  in $1,$2 }

 (* Expressions *)

@@ -19,9 +19,55 @@ module ExtParserLog =
 include ParserLog
 end

-module M = ParserUnit.Make (IO)
-  (Lexer.Make (LexToken))
-  (AST)
-  (ExtParser)
-  (ParErr)
-  (ExtParserLog)
+module MyLexer = Lexer.Make (LexToken)
+
+module Unit =
+  ParserUnit.Make (IO)(MyLexer)(AST)(ExtParser)(ParErr)(ExtParserLog)
+
+open! SyntaxError
+
+let () =
+  try Unit.run () with
+    (* Ad hoc errors from the parser *)
+
+    Error (Reserved_name name) ->
+      let () = Unit.close_all () in
+      let token =
+        MyLexer.Token.mk_ident name.Region.value name.Region.region in
+      (match token with
+         Stdlib.Error _ ->
+           assert false (* Should not fail if [name] is valid. *)
+       | Ok invalid ->
+           let point = "Reserved name.\nHint: Change the name.\n",
+                       None, invalid in
+           let error =
+             Unit.format_error ~offsets:IO.options#offsets
+                               IO.options#mode point
+           in Printf.eprintf "\027[31m%s\027[0m%!" error)
+
+  | Error (Duplicate_parameter name) ->
+      let () = Unit.close_all () in
+      let token =
+        MyLexer.Token.mk_ident name.Region.value name.Region.region in
+      (match token with
+         Stdlib.Error _ ->
+           assert false (* Should not fail if [name] is valid. *)
+       | Ok invalid ->
+           let point = "Duplicate parameter.\nHint: Change the name.\n",
+                       None, invalid in
+           let error =
+             Unit.format_error ~offsets:IO.options#offsets
+                               IO.options#mode point
+           in Printf.eprintf "\027[31m%s\027[0m%!" error)
+
+  | Error (Duplicate_variant name) ->
+      let () = Unit.close_all () in
+      let token =
+        MyLexer.Token.mk_constr name.Region.value name.Region.region in
+      let point = "Duplicate variant in this type declaration.\n\
+                   Hint: Change the name.\n",
+                  None, token in
+      let error =
+        Unit.format_error ~offsets:IO.options#offsets
+                          IO.options#mode point
+      in Printf.eprintf "\027[31m%s\027[0m%!" error

src/passes/1-parser/pascaligo/SyntaxError.ml (new file, +8)
@@ -0,0 +1,8 @@
+type t =
+  Reserved_name of string Region.reg
+| Duplicate_parameter of string Region.reg
+| Duplicate_variant of string Region.reg
+
+type error = t
+
+exception Error of t

src/passes/1-parser/pascaligo/SyntaxError.mli (new file, +8)
@@ -0,0 +1,8 @@
+type t =
+  Reserved_name of string Region.reg
+| Duplicate_parameter of string Region.reg
+| Duplicate_variant of string Region.reg
+
+type error = t
+
+exception Error of t
@@ -8,7 +8,8 @@
 (library
 (name parser_pascaligo)
 (public_name ligo.parser.pascaligo)
-(modules AST pascaligo Parser ParserLog LexToken)
+(modules
+  SyntaxError AST pascaligo Parser ParserLog LexToken)
 (libraries
 menhirLib
 parser_shared
@@ -135,15 +135,17 @@ type ident_err = Reserved_name
 type nat_err = Invalid_natural
 | Non_canonical_zero_nat
 type sym_err = Invalid_symbol
+type kwd_err = Invalid_keyword

-val mk_string : lexeme -> Region.t -> token
-val mk_bytes : lexeme -> Region.t -> token
 val mk_int : lexeme -> Region.t -> (token, int_err) result
 val mk_nat : lexeme -> Region.t -> (token, nat_err) result
 val mk_mutez : lexeme -> Region.t -> (token, int_err) result
 val mk_ident : lexeme -> Region.t -> (token, ident_err) result
-val mk_constr : lexeme -> Region.t -> token
 val mk_sym : lexeme -> Region.t -> (token, sym_err) result
+val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result
+val mk_string : lexeme -> Region.t -> token
+val mk_bytes : lexeme -> Region.t -> token
+val mk_constr : lexeme -> Region.t -> token
 val eof : Region.t -> token

 (* Predicates *)
@@ -1,4 +1,6 @@
 {
+(* START OF HEADER *)
+
 type lexeme = string

 let sprintf = Printf.sprintf
@@ -91,116 +93,117 @@ type t =

 | EOF of Region.t (* End of file *)

+
 type token = t

 let proj_token = function
-| CAT region -> region, "CAT"
+  CAT region -> region, "CAT"
 | MINUS region -> region, "MINUS"
 | PLUS region -> region, "PLUS"
 | SLASH region -> region, "SLASH"
 | TIMES region -> region, "TIMES"
 | LPAR region -> region, "LPAR"
 | RPAR region -> region, "RPAR"
 | LBRACKET region -> region, "LBRACKET"
 | RBRACKET region -> region, "RBRACKET"
 | LBRACE region -> region, "LBRACE"
 | RBRACE region -> region, "RBRACE"
 | COMMA region -> region, "COMMA"
 | SEMI region -> region, "SEMI"
 | VBAR region -> region, "VBAR"
 | COLON region -> region, "COLON"
 | DOT region -> region, "DOT"
 | ELLIPSIS region -> region, "ELLIPSIS"
 | WILD region -> region, "WILD"
 | EQ region -> region, "EQ"
 | EQEQ region -> region, "EQEQ"
 | NE region -> region, "NE"
 | LT region -> region, "LT"
 | GT region -> region, "GT"
 | LE region -> region, "LE"
 | GE region -> region, "GE"
 | ARROW region -> region, "ARROW"
 | BOOL_OR region -> region, "BOOL_OR"
 | BOOL_AND region -> region, "BOOL_AND"
 | Ident Region.{region; value} ->
   region, sprintf "Ident %s" value
 | Constr Region.{region; value} ->
   region, sprintf "Constr %s" value
 | Int Region.{region; value = s,n} ->
   region, sprintf "Int (\"%s\", %s)" s (Z.to_string n)
 | Nat Region.{region; value = s,n} ->
   region, sprintf "Nat (\"%s\", %s)" s (Z.to_string n)
 | Mutez Region.{region; value = s,n} ->
   region, sprintf "Mutez (\"%s\", %s)" s (Z.to_string n)
 | String Region.{region; value} ->
   region, sprintf "String %s" value
 | Bytes Region.{region; value = s,b} ->
   region,
   sprintf "Bytes (\"%s\", \"0x%s\")"
     s (Hex.to_string b)
 | Else region -> region, "Else"
 | False region -> region, "False"
 | If region -> region, "If"
 | Let region -> region, "Let"
 | Switch region -> region, "Switch"
 | Mod region -> region, "Mod"
 | NOT region -> region, "!"
 | Or region -> region, "Or"
 | True region -> region, "True"
 | Type region -> region, "Type"
 | C_None region -> region, "C_None"
 | C_Some region -> region, "C_Some"
 | EOF region -> region, "EOF"

 let to_lexeme = function
-| CAT _ -> "++"
+  CAT _ -> "++"
 | MINUS _ -> "-"
 | PLUS _ -> "+"
 | SLASH _ -> "/"
 | TIMES _ -> "*"
 | LPAR _ -> "("
 | RPAR _ -> ")"
 | LBRACKET _ -> "["
 | RBRACKET _ -> "]"
 | LBRACE _ -> "{"
 | RBRACE _ -> "}"
 | COMMA _ -> ","
 | SEMI _ -> ";"
 | VBAR _ -> "|"
 | COLON _ -> ":"
 | DOT _ -> "."
 | ELLIPSIS _ -> "..."
 | WILD _ -> "_"
 | EQ _ -> "="
 | EQEQ _ -> "=="
 | NE _ -> "!="
 | LT _ -> "<"
 | GT _ -> ">"
 | LE _ -> "<="
 | GE _ -> ">="
 | ARROW _ -> "=>"
 | BOOL_OR _ -> "||"
 | BOOL_AND _ -> "&&"
 | Ident id -> id.Region.value
 | Constr id -> id.Region.value
 | Int i
 | Nat i
 | Mutez i -> fst i.Region.value
 | String s -> s.Region.value
 | Bytes b -> fst b.Region.value
 | Else _ -> "else"
 | False _ -> "false"
 | If _ -> "if"
 | Let _ -> "let"
 | Mod _ -> "mod"
 | NOT _ -> "!"
 | Or _ -> "or"
 | Switch _ -> "switch"
 | True _ -> "true"
 | Type _ -> "type"
 | C_None _ -> "None"
 | C_Some _ -> "Some"
 | EOF _ -> ""

 let to_string token ?(offsets=true) mode =
 let region, val_str = proj_token token in
@@ -216,6 +219,7 @@ type ident_err = Reserved_name
 type nat_err = Invalid_natural
 | Non_canonical_zero_nat
 type sym_err = Invalid_symbol
+type kwd_err = Invalid_keyword

 (* LEXIS *)

@@ -228,8 +232,7 @@ let keywords = [
 (fun reg -> Mod reg);
 (fun reg -> Or reg);
 (fun reg -> True reg);
-(fun reg -> Type reg);
-]
+(fun reg -> Type reg)]

 (* See: http://caml.inria.fr/pub/docs/manual-ocaml/lex.html#sec86 and
 https://github.com/facebook/reason/blob/master/src/reason-parser/reason_parser.mly *)
@@ -305,6 +308,14 @@ let lexicon : lexis =
 cstr = build constructors;
 res = reserved}

+(* Keywords *)
+
+let mk_kwd ident region =
+  match SMap.find_opt ident lexicon.kwd with
+    Some mk_kwd -> Ok (mk_kwd region)
+  | None -> Error Invalid_keyword
+
+(* END OF HEADER *)
 }

 (* START LEXER DEFINITION *)
@@ -380,6 +391,8 @@ let mk_mutez lexeme region =

 let eof region = EOF region

+(* Making symbols *)
+
 let mk_sym lexeme region =
 match lexeme with
 "-" -> Ok (MINUS region)
@@ -408,13 +421,18 @@ let mk_sym lexeme region =
 | ")" -> Ok (RPAR region)

 (* Symbols specific to ReasonLIGO *)
-| "..."-> Ok (ELLIPSIS region)
+| "..." -> Ok (ELLIPSIS region)
 | "=>" -> Ok (ARROW region)
 | "==" -> Ok (EQEQ region)
 | "!" -> Ok (NOT region)
 | "++" -> Ok (CAT region)

+(* Invalid symbols *)
+
 | _ -> Error Invalid_symbol


 (* Identifiers *)

 let mk_ident' lexeme region lexicon =
@@ -448,26 +466,26 @@ let is_ident = function
 | _ -> false

 let is_kwd = function
-| Else _
+  Else _
 | False _
 | If _
 | Let _
 | Switch _
 | Mod _
 | Or _
 | True _
-| Type _
+| Type _ -> true
 | _ -> false

 let is_constr = function
-| Constr _
+  Constr _
 | Ident _
 | False _
 | True _ -> true
 | _ -> false

 let is_sym = function
-| CAT _
+  CAT _
 | MINUS _
 | PLUS _
 | SLASH _
@@ -370,7 +370,7 @@ ptuple:
 in PTuple {value=$1; region} }

 unit:
-"(" ")" { {region = cover $1 $2; value = ghost, ghost} }
+"(" ")" { {region = cover $1 $2; value = $1, $2} }

 (* Expressions *)

@@ -790,7 +790,7 @@ sequence_or_record_in:

 sequence_or_record:
 "{" sequence_or_record_in "}" {
-let compound = Braces($1, $3) in
+let compound = Braces ($1,$3) in
 let region = cover $1 $3 in
 match $2 with
 PaSequence s ->
@@ -19,9 +19,26 @@ module ExtParserLog =
 include ParserLog
 end

-module M = ParserUnit.Make (IO)
-  (Lexer.Make (LexToken))
-  (AST)
-  (ExtParser)
-  (ParErr)
-  (ExtParserLog)
+module MyLexer = Lexer.Make (LexToken)
+
+module Unit =
+  ParserUnit.Make (IO)(MyLexer)(AST)(ExtParser)(ParErr)(ExtParserLog)
+
+(* Main *)
+
+let () =
+  try Unit.run () with
+    (* Ad hoc errors from the parsers *)
+
+    SyntaxError.Error (SyntaxError.WrongFunctionArguments expr) ->
+      let () = Unit.close_all () in
+      let msg = "It looks like you are defining a function, \
+                 however we do not\n\
+                 understand the parameters declaration.\n\
+                 Examples of valid functions:\n\
+                 let x = (a: string, b: int) : int => 3;\n\
+                 let x = (a: string) : string => \"Hello, \" ++ a;\n"
+      and reg = AST.expr_to_region expr in
+      let error = Unit.short_error ~offsets:IO.options#offsets
+                                   IO.options#mode msg reg
+      in Printf.eprintf "\027[31m%s\027[0m%!" error
@@ -1,5 +1,7 @@
 (* Generic parser for LIGO *)

+module Region = Simple_utils.Region
+
 module type PARSER =
 sig
 (* The type of tokens, abstract syntax trees and expressions *)
@@ -117,4 +119,9 @@ module Make (Lexer: Lexer.S)
 let header = header ^ trailer in
 header ^ (if msg = "" then ".\n" else ":\n" ^ msg)

+  let short_error ?(offsets=true) mode msg (invalid_region: Region.t) =
+    let () = assert (not (invalid_region#is_ghost)) in
+    let header =
+      "Parse error " ^ invalid_region#to_string ~offsets mode in
+    header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
 end
@@ -1,5 +1,7 @@
 (* Generic parser API for LIGO *)

+module Region = Simple_utils.Region
+
 module type PARSER =
 sig
 (* The type of tokens. *)
@@ -56,5 +58,9 @@ module Make (Lexer: Lexer.S)

 exception Point of error

-val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string
+val format_error :
+  ?offsets:bool -> [`Byte | `Point] -> error -> string
+
+val short_error :
+  ?offsets:bool -> [`Byte | `Point] -> message -> Region.t -> string
 end
@@ -14,7 +14,8 @@ module type Pretty =
 state -> ast -> unit
 val mk_state :
 offsets:bool -> mode:[`Point|`Byte] -> buffer:Buffer.t -> state
-val print_tokens : state -> ast -> unit
+val print_tokens :
+  state -> ast -> unit
 end

 module Make (IO: S)
@@ -85,6 +86,9 @@ module Make (IO: S)

 module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr)

+let format_error = ParserFront.format_error
+let short_error = ParserFront.short_error
+
 let lexer_inst = Lexer.open_token_stream (Some pp_input)
 let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst

@@ -103,7 +107,7 @@ module Make (IO: S)

 (* Main *)

-let () =
+let run () =
 try
 let ast =
 if IO.options#mono
@@ -131,6 +135,7 @@ module Make (IO: S)
 end
 with
 (* Lexing errors *)
+
 Lexer.Error err ->
 close_all ();
 let msg =
@@ -139,6 +144,7 @@ module Make (IO: S)
 in prerr_string msg

 (* Incremental API of Menhir *)
+
 | ParserFront.Point point ->
 let () = close_all () in
 let error =
@@ -147,6 +153,7 @@ module Make (IO: S)
 in eprintf "\027[31m%s\027[0m%!" error

 (* Monolithic API of Menhir *)
+
 | Parser.Error ->
 let () = close_all () in
 let invalid, valid_opt =
@@ -162,6 +169,7 @@ module Make (IO: S)
 in eprintf "\027[31m%s\027[0m%!" error

 (* I/O errors *)
+
 | Sys_error msg -> Utils.highlight msg

 end
@@ -1,5 +1,5 @@

-function get_chain_id (const tt : chain_id) : chain_id is
+function chain_id (const tt : chain_id) : chain_id is
 block {
 var toto : chain_id := get_chain_id ;
 } with ( toto )
@@ -1759,7 +1759,7 @@ let chain_id () : unit result =
 Tezos_base__TzPervasives.Chain_id.zero in
 let make_input = e_chain_id pouet in
 let make_expected = e_chain_id pouet in
-let%bind () = expect_eq program "get_chain_id" make_input make_expected in
+let%bind () = expect_eq program "chain_id" make_input make_expected in
 ok ()

 let key_hash () : unit result =