Added support for mtz literals.
parent 72aabf1f49
commit 14ad75892c
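For orientation, a hedged sketch (not part of the diff) of what the new Mtz token carries: like Int and Nat, it pairs the raw source lexeme with its numeric value as a Z.t.

(* Hypothetical illustration of the (lexeme * Z.t) payload used by the new
   Mtz constructor; the names here are for illustration only. Requires zarith. *)
let example_mtz_payload : string * Z.t = ("500mtz", Z.of_int 500)

let () =
  let lexeme, value = example_mtz_payload in
  Printf.printf "%s -> %s\n" lexeme (Z.to_string value)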
@@ -578,6 +578,7 @@ and arith_expr =
 | Neg of minus un_op reg
 | Int of (Lexer.lexeme * Z.t) reg
 | Nat of (Lexer.lexeme * Z.t) reg
+| Mtz of (Lexer.lexeme * Z.t) reg
 
 and string_expr =
   Cat of cat bin_op reg
@@ -726,7 +727,8 @@ and arith_expr_to_region = function
 | Mod {region; _}
 | Neg {region; _}
 | Int {region; _}
-| Nat {region; _} -> region
+| Nat {region; _}
+| Mtz {region; _} -> region
 
 and string_expr_to_region = function
   Cat {region; _}
@@ -562,6 +562,7 @@ and arith_expr =
 | Neg of minus un_op reg
 | Int of (Lexer.lexeme * Z.t) reg
 | Nat of (Lexer.lexeme * Z.t) reg
+| Mtz of (Lexer.lexeme * Z.t) reg
 
 and string_expr =
   Cat of cat bin_op reg
@@ -35,6 +35,7 @@ type t =
 | Bytes of (lexeme * Hex.t) Region.reg
 | Int of (lexeme * Z.t) Region.reg
 | Nat of (lexeme * Z.t) Region.reg
+| Mtz of (lexeme * Z.t) Region.reg
 | Ident of lexeme Region.reg
 | Constr of lexeme Region.reg
@@ -146,6 +147,7 @@ val mk_string : lexeme -> Region.t -> token
 val mk_bytes : lexeme -> Region.t -> token
 val mk_int : lexeme -> Region.t -> (token, int_err) result
 val mk_nat : lexeme -> Region.t -> (token, int_err) result
+val mk_mtz : lexeme -> Region.t -> (token, int_err) result
 val mk_ident : lexeme -> Region.t -> (token, ident_err) result
 val mk_constr : lexeme -> Region.t -> token
 val mk_sym : lexeme -> Region.t -> token
@@ -33,6 +33,7 @@ type t =
 | Bytes of (lexeme * Hex.t) Region.reg
 | Int of (lexeme * Z.t) Region.reg
 | Nat of (lexeme * Z.t) Region.reg
+| Mtz of (lexeme * Z.t) Region.reg
 | Ident of lexeme Region.reg
 | Constr of lexeme Region.reg
@@ -160,6 +161,9 @@ let proj_token = function
 | Nat Region.{region; value = s,n} ->
     region, sprintf "Nat (\"%s\", %s)" s (Z.to_string n)
 
+| Mtz Region.{region; value = s,n} ->
+    region, sprintf "Mtz (\"%s\", %s)" s (Z.to_string n)
+
 | Ident Region.{region; value} ->
     region, sprintf "Ident \"%s\"" value
@@ -254,8 +258,9 @@ let to_lexeme = function
   String s -> s.Region.value
 | Bytes b -> fst b.Region.value
-| Int i -> fst i.Region.value
-| Nat i -> fst i.Region.value
+| Int i
+| Nat i
+| Mtz i -> fst i.Region.value
 | Ident id
 | Constr id -> id.Region.value
@@ -488,6 +493,15 @@ let mk_nat lexeme region =
   then Error Non_canonical_zero
   else Ok (Nat Region.{region; value = lexeme, z})
 
+let mk_mtz lexeme region =
+  let z =
+    Str.(global_replace (regexp "_") "" lexeme) |>
+    Str.(global_replace (regexp "mtz") "") |>
+    Z.of_string in
+  if Z.equal z Z.zero && lexeme <> "0mtz"
+  then Error Non_canonical_zero
+  else Ok (Mtz Region.{region; value = lexeme, z})
+
 let eof region = EOF region
 
 let mk_sym lexeme region =
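As a standalone check of the normalisation performed by mk_mtz above (a sketch under assumptions, not code from the commit): underscores and the "mtz" suffix are stripped with Str before Z.of_string parses the digits, and only "0mtz" is accepted as a spelling of zero.

(* Sketch of the mk_mtz lexeme normalisation; requires the str and zarith
   libraries. The helper name mtz_value is hypothetical. *)
let mtz_value lexeme =
  Str.(global_replace (regexp "_") "" lexeme)
  |> Str.(global_replace (regexp "mtz") "")
  |> Z.of_string

let () =
  assert (Z.equal (mtz_value "1_000mtz") (Z.of_int 1000));
  (* mk_mtz rejects any spelling of zero other than "0mtz"
     with Non_canonical_zero. *)
  assert (Z.equal (mtz_value "0mtz") Z.zero)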
@@ -65,6 +65,7 @@ module type TOKEN =
 val mk_bytes : lexeme -> Region.t -> token
 val mk_int : lexeme -> Region.t -> (token, int_err) result
 val mk_nat : lexeme -> Region.t -> (token, int_err) result
+val mk_mtz : lexeme -> Region.t -> (token, int_err) result
 val mk_ident : lexeme -> Region.t -> (token, ident_err) result
 val mk_constr : lexeme -> Region.t -> token
 val mk_sym : lexeme -> Region.t -> token
@@ -106,6 +106,7 @@ module type TOKEN =
 val mk_bytes : lexeme -> Region.t -> token
 val mk_int : lexeme -> Region.t -> (token, int_err) result
 val mk_nat : lexeme -> Region.t -> (token, int_err) result
+val mk_mtz : lexeme -> Region.t -> (token, int_err) result
 val mk_ident : lexeme -> Region.t -> (token, ident_err) result
 val mk_constr : lexeme -> Region.t -> token
 val mk_sym : lexeme -> Region.t -> token
@@ -419,6 +420,13 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
   | Error Token.Non_canonical_zero ->
       fail region Non_canonical_zero
 
+let mk_mtz state buffer =
+  let region, lexeme, state = sync state buffer in
+  match Token.mk_mtz lexeme region with
+    Ok token -> token, state
+  | Error Token.Non_canonical_zero ->
+      fail region Non_canonical_zero
+
 let mk_ident state buffer =
   let region, lexeme, state = sync state buffer in
   match Token.mk_ident lexeme region with
@@ -482,17 +490,18 @@ rule init state = parse
 | _ { rollback lexbuf; scan state lexbuf }
 
 and scan state = parse
-  nl { scan (push_newline state lexbuf) lexbuf }
-| ' '+ { scan (push_space state lexbuf) lexbuf }
-| '\t'+ { scan (push_tabs state lexbuf) lexbuf }
+  nl { scan (push_newline state lexbuf) lexbuf }
+| ' '+ { scan (push_space state lexbuf) lexbuf }
+| '\t'+ { scan (push_tabs state lexbuf) lexbuf }
 
-| ident { mk_ident state lexbuf |> enqueue }
-| constr { mk_constr state lexbuf |> enqueue }
-| bytes { (mk_bytes seq) state lexbuf |> enqueue }
-| natural 'n' { mk_nat state lexbuf |> enqueue }
-| integer { mk_int state lexbuf |> enqueue }
-| symbol { mk_sym state lexbuf |> enqueue }
-| eof { mk_eof state lexbuf |> enqueue }
+| ident { mk_ident state lexbuf |> enqueue }
+| constr { mk_constr state lexbuf |> enqueue }
+| bytes { (mk_bytes seq) state lexbuf |> enqueue }
+| natural 'n' { mk_nat state lexbuf |> enqueue }
+| natural "mtz" { mk_mtz state lexbuf |> enqueue }
+| integer { mk_int state lexbuf |> enqueue }
+| symbol { mk_sym state lexbuf |> enqueue }
+| eof { mk_eof state lexbuf |> enqueue }
 
 | '"' { let opening, _, state = sync state lexbuf in
         let thread = {opening; len=1; acc=['"']} in
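The new rule recognises a natural number immediately followed by the suffix "mtz" and hands the lexeme to mk_mtz. A rough approximation of that shape outside ocamllex (a sketch; whether natural admits underscore separators is assumed from the underscore stripping in mk_mtz):

(* Hypothetical predicate mirroring the  natural "mtz"  pattern; requires
   the str library. *)
let is_mtz_lexeme s =
  Str.string_match (Str.regexp "^[0-9][0-9_]*mtz$") s 0

let () =
  assert (is_mtz_lexeme "500mtz");
  assert (is_mtz_lexeme "1_000mtz");
  assert (not (is_mtz_lexeme "500n"))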
@@ -9,6 +9,7 @@
 %token <(LexToken.lexeme * Hex.t) Region.reg> Bytes
 %token <(LexToken.lexeme * Z.t) Region.reg> Int
 %token <(LexToken.lexeme * Z.t) Region.reg> Nat
+%token <(LexToken.lexeme * Z.t) Region.reg> Mtz
 %token <LexToken.lexeme Region.reg> Ident
 %token <LexToken.lexeme Region.reg> Constr
@@ -215,6 +215,7 @@ variant:
     let region = cover $1.region $3.region
     and value = {constr = $1; kwd_of = $2; product = $3}
     in {region; value} }
+(* TODO: Unary constructors *)
 
 record_type:
   Record series(field_decl,End) {
@@ -945,6 +946,7 @@ unary_expr:
 core_expr:
   Int { EArith (Int $1) }
 | Nat { EArith (Nat $1) }
+| Mtz { EArith (Mtz $1) }
 | var { EVar $1 }
 | String { EString (String $1) }
 | Bytes { EBytes $1 }
@@ -512,8 +512,9 @@ and print_arith_expr = function
     print_expr arg1; print_token op "mod"; print_expr arg2
 | Neg {value = {op; arg}; _} ->
     print_token op "-"; print_expr arg
-| Int i -> print_int i
-| Nat i -> print_int i
+| Int i
+| Nat i
+| Mtz i -> print_int i
 
 and print_string_expr = function
   Cat {value = {arg1; op; arg2}; _} ->
@@ -171,6 +171,9 @@ let rec simpl_expression (t:Raw.expr) : ae result =
 | EArith (Nat n) ->
     let n = Z.to_int @@ snd @@ n.value in
     ok @@ make_e_a @@ E_literal (Literal_nat n)
+| EArith (Mtz n) ->
+    let n = Z.to_int @@ snd @@ n.value in
+    ok @@ make_e_a @@ E_literal (Literal_tez n)
 | EArith _ -> simple_fail "arith: not supported yet"
 | EString (String s) ->
     ok @@ make_e_a @@ E_literal (Literal_string s.value)
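The simplifier maps an Mtz literal to Literal_tez by narrowing the token's Z.t to an OCaml int, exactly as it already does for Nat. A small sketch of that narrowing (the helper name is hypothetical; requires zarith):

(* Z.to_int raises Z.Overflow when the literal does not fit in an int. *)
let to_tez_amount (payload : string * Z.t) : int =
  Z.to_int (snd payload)

let () =
  Printf.printf "%d\n" (to_tez_amount ("500mtz", Z.of_int 500))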