multifix + restructure

Galfour 2019-04-05 15:37:37 +00:00
parent 8edada0f6c
commit aca086e5e7
78 changed files with 2128 additions and 55 deletions

View File

@@ -5,8 +5,9 @@
tezos-utils
tezos-micheline
meta-michelson
ligo-helpers
ligo-parser
ligo_helpers
ligo_parser
multifix
)
(preprocess
(pps ppx_let)
@@ -18,5 +19,4 @@
(name runtest)
(action (run test/test.exe))
(deps (glob_files contracts/*))
)

View File

@@ -1,8 +0,0 @@
(library
(libraries
tezos-base
tezos-utils
)
(name ligo_helpers)
(public_name ligo-helpers)
)

View File

@@ -1,22 +0,0 @@
name: "ligo-helpers"
opam-version: "2.0"
version: "1.0"
maintainer: "gabriel.alfour@gmail.com"
authors: [ "Galfour" ]
homepage: "https://gitlab.com/gabriel.alfour/tezos"
bug-reports: "https://gitlab.com/gabriel.alfour/tezos/issues"
dev-repo: "git+https://gitlab.com/gabriel.alfour/tezos.git"
license: "MIT"
depends: [
"ocamlfind" { build }
"dune" { build & >= "1.0.1" }
"meta-michelson"
"tezos-utils"
"tezos-base"
]
build: [
[ "dune" "build" "-p" name "-j" jobs ]
]
url {
src: "https://gitlab.com/gabriel.alfour/tezos/-/archive/master/tezos.tar.gz"
}

View File

@@ -1,19 +0,0 @@
opam-version : "2.0"
version : "1.0"
maintainer : "gabriel.alfour@gmail.com"
authors : [ "Galfour" ]
homepage : "https://gitlab.com/gabriel.alfour/ligo-parser"
bug-reports : "https://gitlab.com/gabriel.alfour/ligo-parser/issues"
dev-repo : "git+https://gitlab.com/gabriel.alfour/ligo-parser.git"
license : "MIT"
depends : [ "dune" "menhir" "hex" "zarith" "getopt" "uutf" ]
build : [
[ "sh" "-c" "printf 'let version = \"%s\"' \"$(git describe --always --dirty --abbrev=0)\" > Version.ml" ]
[ "dune" "build" "-p" name "-j" jobs ]
]
url {
src: "https://gitlab.com/gabriel.alfour/ligo-parser/-/archive/master/ligo-parser.tar.gz"
}

View File

@@ -13,8 +13,6 @@ depends: [
"ppx_let"
"tezos-utils"
"meta-michelson"
"ligo-helpers"
"ligo-parser"
]
build: [
[ "dune" "build" "-p" name "-j" jobs ]

View File

@@ -0,0 +1,9 @@
(library
(name ligo_helpers)
(public_name ligo.helpers)
(libraries
tezos-base
tezos-utils
)
;; (modules x_map option wrap tree location environment dictionary PP trace)
)

View File

@@ -0,0 +1,19 @@
(* AST *)
(* Language *)
(* Singletons *)
type variable = string
(* Hierarchies *)
type arith =
| Let_in of (variable Location.wrap * arith Location.wrap * arith Location.wrap)
| Addition of (arith Location.wrap * arith Location.wrap)
| Substraction of (arith Location.wrap * arith Location.wrap)
| Multiplication of (arith Location.wrap * arith Location.wrap)
| Division of (arith Location.wrap * arith Location.wrap)
| List of ((arith Location.wrap list))
| Arith_variable of (variable Location.wrap)
(* Entry point *)
type entry_point = arith
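
A minimal hand-written sketch (illustrative, not part of the file above): building the value for "x + y" with the Location.wrap helper and the Location.dummy location defined later in this commit.

let w x = Location.wrap ~loc:Location.dummy x
let var name : arith Location.wrap = w (Arith_variable (w name))
let example : entry_point Location.wrap = w (Addition (var "x", var "y"))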

View File

@@ -0,0 +1,63 @@
(library
(name multifix)
(public_name ligo.multifix)
(libraries lex)
(modules ast parser location user)
)
;; Generating parser
(rule
(targets parser.ml parser.mli)
(deps parser.mly ast.ml)
(action (system "menhir --external-tokens Lex.Token lex/token.mly parser.mly --base parser"))
(mode promote-until-clean)
)
(rule
(targets parser.mly)
(deps partial_parser.mly pre_parser.mly)
(action (system "cat pre_parser.mly partial_parser.mly > parser.mly"))
(mode promote-until-clean)
)
(rule
(targets partial_parser.mly)
(deps generator.exe)
(action (system "./generator.exe parser > partial_parser.mly"))
(mode promote-until-clean)
)
;; Generating AST
(rule
(targets ast.ml)
(deps generator.exe)
(action (system "./generator.exe ast > ast.ml"))
(mode promote-until-clean)
)
;; Generating Generator
(executable
(name generator)
(libraries
ocamlgraph
lex
)
(modules generator)
)
;; Tests
(alias
(name test-user)
(deps user.exe foo.test)
(action (system "./user.exe foo.test"))
)
(alias
(name runtest)
(deps generator.exe)
(action (system "./generator.exe parser ; ./generator.exe ast"))
)
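
The test-user alias above relies on a driver module, user.ml, which is not shown in this excerpt. A hedged sketch of what such a driver could look like; the module paths (Lex.Lexer, Parser) and the generated entry-point name (entry_point, after the start symbol) are assumptions inferred from the dune and menhir rules above, not taken from the commit:

let () =
  let ic = open_in Sys.argv.(1) in
  let lexbuf = Lexing.from_channel ic in
  (* menhir generates one function per start symbol: lexer -> lexbuf -> AST *)
  let _ast = Parser.entry_point Lex.Lexer.token lexbuf in
  close_in ic ;
  print_endline "parsed"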

View File

@@ -0,0 +1 @@
let toto = at * bo in list [ toto ; tata ; titi ]
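
Given the hierarchy defined in the generator (let_in at the outermost level, then addition/subtraction, multiplication/division, lists, and finally variables), this test input should parse roughly to the following shape. This is an illustration of the expected result with locations omitted, not actual tool output:

Let_in ("toto",
        Multiplication (Arith_variable "at", Arith_variable "bo"),
        List [ Arith_variable "toto" ; Arith_variable "tata" ; Arith_variable "titi" ])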

View File

@@ -0,0 +1,323 @@
module N = struct
type 'a t = {
content : 'a ;
name : string ;
}
let name name content = { name ; content }
let destruct {name ; content} = (name, content)
let get_name x = x.name
let get_content x = x.content
end
let list_filter_map f =
let rec aux acc lst = match lst with
| [] -> List.rev acc
| hd :: tl -> aux (
match f hd with
| Some x -> x :: acc
| None -> acc
) tl
in
aux []
module Ne_list = struct
type 'a t = 'a * 'a list
let of_list lst = List.(hd lst, tl lst)
let iter f (hd, tl : _ t) = f hd ; List.iter f tl
let map f (hd, tl : _ t) = f hd, List.map f tl
let mapi f (hd, tl : _ t) =
let lst = List.mapi f (hd::tl) in
of_list lst
let concat (hd, tl : _ t) = hd @ List.concat tl
let rev (hd, tl : _ t) =
match tl with
| [] -> (hd, [])
| lst ->
let r = List.rev lst in
(List.hd r, List.tl r @ [hd])
end
module PP = struct
open Format
let string : formatter -> string -> unit = fun ppf s -> fprintf ppf "%s" s
let tag tag : formatter -> unit -> unit = fun ppf () -> fprintf ppf tag
let new_line : formatter -> unit -> unit = tag "@;"
let rec new_lines n ppf () =
match n with
| 0 -> new_line ppf ()
| n -> new_line ppf () ; new_lines (n-1) ppf ()
let const const : formatter -> unit -> unit = fun ppf () -> fprintf ppf "%s" const
let comment : formatter -> string -> unit = fun ppf s -> fprintf ppf "(* %s *)" s
let list_sep value separator = pp_print_list ~pp_sep:separator value
let ne_list_sep value separator ppf (hd, tl) =
value ppf hd ;
separator ppf () ;
pp_print_list ~pp_sep:separator value ppf tl
end
module Token = Lex.Token
type token = Token.token
module O = struct
type element =
| Named of string (* Named rule, like type_var *)
| Token of token
| List of ([`Trail | `Lead | `Separator] * token * token * token)
| Current
| Lower (* Lower precedence *)
type operator = element list
type n_operator = operator N.t
type n_operators = n_operator list
type level = n_operators N.t
type hierarchy = level Ne_list.t
type n_hierarchy = hierarchy N.t
type singleton = {
type_name : string ;
type_expression : string ;
menhir_rule : string ;
menhir_code : string ;
}
type language = {
entry_point : string ;
singletons : singleton list ;
hierarchies : n_hierarchy list ;
}
let get_op : n_operator -> operator = N.get_content
let singleton type_name type_expression menhir_rule menhir_code =
{type_name ; type_expression ; menhir_rule ; menhir_code}
let language entry_point singletons hierarchies = {entry_point ; singletons ; hierarchies}
let name_hierarchy name : n_operators list -> n_hierarchy = fun nopss ->
let nopss' = Ne_list.of_list nopss in
let name_i = fun i x -> N.name (name ^ "_" ^ (string_of_int i)) x in
let levels : hierarchy = Ne_list.mapi name_i nopss' in
N.name name levels
end
module Check = struct
open O
let well_formed : language -> unit = fun l ->
let elements : element list -> unit = fun es ->
let rec aux = fun es ->
match es with
| [] -> ()
| [ _ ] -> ()
| (List _ | Named _ | Current | Lower) :: (List _ | Named _ | Current | Lower) :: _ ->
raise (Failure "two non-token separated ops in a row")
| _ :: tl -> aux tl
in
(if (List.length es < 2) then raise (Failure "operator is too short")) ;
aux es in
let op : n_operator -> unit = fun x -> elements @@ N.get_content x in
let level : level -> unit = fun l -> List.iter op @@ N.get_content l in
let hierarchy : n_hierarchy -> unit = fun h -> Ne_list.iter level @@ N.get_content h in
List.iter hierarchy l.hierarchies
let associativity : language -> unit = fun l ->
let level : level -> unit = fun l ->
let aux : ([`Left | `Right | `None] as 'a) -> n_operator -> 'a = fun ass nop ->
let op = N.get_content nop in
match ass, List.hd op, List.nth op (List.length op - 1) with
| _, Lower, Lower -> raise (Failure "double assoc")
| `None, Lower, _ -> `Left
| `None, _, Lower -> `Right
| `Left, _, Lower -> raise (Failure "different assocs")
| `Right, Lower, _ -> raise (Failure "different assocs")
| m, _, _ -> m
in
let _assert = List.fold_left aux `None (N.get_content l) in
()
in
let hierarchy : n_hierarchy -> unit = fun h ->
Ne_list.iter level (N.get_content h) in
List.iter hierarchy l.hierarchies
end
module Print_AST = struct
open Format
let singleton : _ -> O.singleton -> _ = fun ppf s ->
fprintf ppf "type %s = %s" s.type_name s.type_expression
let n_operator level_name : _ -> O.n_operator -> _ = fun ppf nop ->
let type_elements =
let aux : O.element -> string option = fun e ->
match e with
| Named s -> Some (s ^ " Location.wrap")
| List _ -> Some ("(" ^ level_name ^ " Location.wrap list)")
| Token _ -> None
| Current | Lower -> Some (level_name ^ " Location.wrap") in
list_filter_map aux (N.get_content nop) in
let type_element = fun ppf te -> fprintf ppf "%s" te in
fprintf ppf "| %s of (%a)"
(N.get_name nop)
PP.(list_sep type_element (const " * ")) type_elements
let n_hierarchy : _ -> O.n_hierarchy -> _ = fun ppf nh ->
let levels = Ne_list.map N.get_content (N.get_content nh) in
let nops = Ne_list.concat levels in
let name = N.get_name nh in
fprintf ppf "type %s =@.@[%a@]"
name
PP.(list_sep (n_operator name) new_line) nops
let language : _ -> O.language -> _ = fun ppf l ->
fprintf ppf "%a@.@." PP.comment "Language" ;
fprintf ppf " %a@.%a@.@." PP.comment "Singletons" PP.(list_sep singleton new_line) l.singletons ;
fprintf ppf " %a@.%a@." PP.comment "Hierarchies" PP.(list_sep n_hierarchy (new_lines 2)) l.hierarchies ;
fprintf ppf " %a@.type entry_point = %s@.@." PP.comment "Entry point" l.entry_point ;
()
end
module Print_Grammar = struct
open Format
let singleton : _ -> O.singleton -> _ = fun ppf s ->
fprintf ppf "%s : %s@. @[<v>{@; @[<v>let loc = Location.make $startpos $endpos in@;Location.wrap ~loc %s@]@;}@;@]"
s.type_name s.menhir_rule s.menhir_code
let letters = [| "a" ; "b" ; "c" ; "d" ; "e" ; "f" ; "g" ; "h" ; "i" ; "j" |]
let n_operator_rule prev_lvl_name cur_lvl_name : _ -> O.n_operator -> _ = fun ppf nop ->
let i = ref 0 in
let element : _ -> O.element -> _ = fun ppf element ->
(match element with
| Token t -> i := !i - 1 ; PP.string ppf @@ Token.to_string t
| List (mode, beg, sep, end_) ->
fprintf ppf "%s %s = %s(%s, %s) %s"
(Token.to_string beg)
letters.(!i)
(match mode with | `Lead -> "lead_list" | `Trail -> "trail_list" | `Separator -> "separated_list")
(Token.to_string sep)
cur_lvl_name
(Token.to_string end_)
| Named n ->
fprintf ppf "%s = %s" letters.(!i) n
| Current ->
fprintf ppf "%s = %s" letters.(!i) cur_lvl_name
| Lower ->
fprintf ppf "%s = %s" letters.(!i) prev_lvl_name
) ;
i := !i + 1
in
PP.(list_sep element (const " ")) ppf (N.get_content nop)
let n_operator_code : _ -> O.n_operator -> _ = fun ppf nop ->
let (name, elements) = N.destruct nop in
let elements' =
let i = ref 0 in
let aux : O.element -> _ = fun e ->
let r =
match e with
| Token _ -> i := !i - 1 ; None
| List _ | Named _ | Current | Lower -> Some letters.(!i)
in i := !i + 1 ; r
in
list_filter_map aux elements in
fprintf ppf "%s (%a)" name PP.(list_sep string (const " , ")) elements'
let n_operator prev_lvl_name cur_lvl_name : _ -> O.n_operator -> _ = fun ppf nop ->
let name = N.get_name nop in
fprintf ppf "%a@;| %a@; @[<v>{@; @[let loc = Location.make $startpos $endpos in@;Location.wrap ~loc %@%@ %a@]@;}@]" PP.comment name
(n_operator_rule prev_lvl_name cur_lvl_name) nop
n_operator_code nop
let level prev_lvl_name : _ -> O.level -> _ = fun ppf l ->
let name = N.get_name l in
match prev_lvl_name with
| "" -> (
fprintf ppf "%s :@. @[<v>%a@]" name
PP.(list_sep (n_operator prev_lvl_name name) new_line) (N.get_content l) ;
)
| _ -> (
fprintf ppf "%s :@. @[<v>%a@;| %s { $1 }@]" name
PP.(list_sep (n_operator prev_lvl_name name) new_line) (N.get_content l)
prev_lvl_name
)
let n_hierarchy : _ -> O.n_hierarchy -> _ = fun ppf nh ->
let name = N.get_name nh in
fprintf ppf "%a@.%%inline %s : %s_0 { $1 }@.@;" PP.comment ("Top-level for " ^ name) name name;
let (hd, tl) = Ne_list.rev @@ N.get_content nh in
fprintf ppf "%a" (level "") hd ;
let aux prev_name lvl =
PP.new_lines 2 ppf () ;
fprintf ppf "%a" (level prev_name) lvl ;
N.get_name lvl
in
let _last_name = List.fold_left aux (N.get_name hd) tl in
()
let language : _ -> O.language -> _ = fun ppf l ->
fprintf ppf "%a@.@." PP.comment "Generated Language" ;
fprintf ppf "entry_point : %s EOF { $1 }@.@." l.entry_point ;
fprintf ppf "%a@.@." PP.comment "Singletons" ;
fprintf ppf "@[%a@]@.@." (PP.list_sep singleton PP.new_line) l.singletons ;
fprintf ppf "%a@.@." PP.comment "Hierarchies" ;
fprintf ppf "@[%a@]" (PP.list_sep n_hierarchy PP.new_line) l.hierarchies ;
end
let variable = O.singleton "variable" "string" "NAME" "$1"
let infix : string -> [`Left | `Right] -> token -> O.n_operator = fun name assoc t ->
let open O in
match assoc with
| `Left -> N.name name [Current ; Token t ; Lower]
| `Right -> N.name name [Current ; Token t ; Lower]
let list = N.name "List" [
O.Token Token.LIST ; List (`Lead, Token.LSQUARE, Token.SEMICOLON, Token.RSQUARE) ;
]
let let_in : O.n_operator = N.name "Let_in" [
O.Token Token.LET ; Named "variable" ;
O.Token Token.EQUAL ; Current ;
O.Token Token.IN ; Current ;
]
let addition = infix "Addition" `Left Token.PLUS
let substraction = infix "Substraction" `Left Token.MINUS
let multiplication = infix "Multiplication" `Left Token.TIMES
let division = infix "Division" `Left Token.DIV
let arith_variable : O.n_operator = N.name "Arith_variable" [ O.Named "variable" ]
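(* Note: the hierarchy below lists levels loosest-binding first. name_hierarchy
   numbers them arith_0 .. arith_4, and Print_Grammar makes each level fall
   through to the next, tighter one, with arith_0 as the entry level. *)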
let arith = O.name_hierarchy "arith" [
[let_in] ;
[addition ; substraction] ;
[multiplication ; division] ;
[list] ;
[arith_variable] ;
]
let language = O.language "arith" [variable] [arith]
let () =
let argn = Array.length Sys.argv in
if argn = 1 then exit 1 ;
let arg = Sys.argv.(1) in
match arg with
| "parser" -> (
Format.printf "%a@.%a\n" PP.comment "Full Grammar" Print_Grammar.language language
)
| "ast" -> (
Format.printf "%a@.%a\n" PP.comment "AST" Print_AST.language language
)
| _ -> exit 1

View File

@@ -0,0 +1,24 @@
(library
(name lex)
(public_name ligo.multifix.lex)
(modules token token_type lexer)
)
(rule
(targets token_type.ml token_type.mli)
(deps token.mly)
(action (system "menhir --only-tokens token.mly --base token_type"))
(mode promote-until-clean)
)
(alias
(name lexer.mll)
(deps token.ml)
)
(rule
(targets lexer.ml)
(deps token.ml lexer.mll)
(action (system "ocamllex lexer.mll"))
(mode promote-until-clean)
)

View File

@@ -0,0 +1,410 @@
# 1 "lexer.mll"
open Token
exception Error of string
exception Unexpected_character of string
# 9 "lexer.ml"
let __ocaml_lex_tables = {
Lexing.lex_base =
"\000\000\238\255\239\255\075\000\241\255\242\255\243\255\244\255\
\245\255\246\255\247\255\248\255\160\000\235\000\041\000\014\000\
\254\255\001\000\001\000\255\255\038\000\001\000\252\255\054\001\
\129\001\204\001\023\002\098\002\173\002\248\002";
Lexing.lex_backtrk =
"\255\255\255\255\255\255\017\000\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\017\000\017\000\017\000\002\000\
\255\255\000\000\017\000\255\255\255\255\255\255\255\255\015\000\
\015\000\015\000\004\000\015\000\006\000\005\000";
Lexing.lex_default =
"\001\000\000\000\000\000\255\255\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\255\255\255\255\021\000\255\255\
\000\000\255\255\255\255\000\000\021\000\255\255\000\000\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255";
Lexing.lex_trans =
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\016\000\017\000\017\000\000\000\018\000\019\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\016\000\000\000\014\000\022\000\000\000\000\000\000\000\000\000\
\000\000\000\000\006\000\008\000\000\000\007\000\000\000\005\000\
\015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\
\015\000\015\000\000\000\009\000\000\000\004\000\015\000\015\000\
\015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\
\255\255\000\000\000\000\255\255\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\011\000\000\000\010\000\000\000\000\000\
\000\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\012\000\003\000\003\000\013\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\020\000\000\000\000\000\
\000\000\000\000\000\000\000\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\000\000\000\000\
\000\000\000\000\023\000\000\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\000\000\000\000\000\000\000\000\023\000\
\002\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\029\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\000\000\255\255\000\000\
\000\000\255\255\000\000\000\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\000\000\000\000\
\000\000\000\000\023\000\000\000\023\000\023\000\023\000\023\000\
\025\000\023\000\023\000\023\000\024\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\000\000\000\000\000\000\000\000\023\000\000\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\000\000\000\000\000\000\000\000\
\023\000\000\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\027\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\000\000\
\000\000\000\000\000\000\023\000\000\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\026\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\000\000\000\000\000\000\000\000\023\000\000\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\000\000\000\000\000\000\
\000\000\023\000\000\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\028\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\000\000\000\000\000\000\000\000\023\000\000\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\000\000\000\000\000\000\000\000\023\000\
\000\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000";
Lexing.lex_check =
"\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\000\000\000\000\018\000\255\255\000\000\017\000\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\255\255\000\000\021\000\255\255\255\255\255\255\255\255\
\255\255\255\255\000\000\000\000\255\255\000\000\255\255\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\255\255\000\000\255\255\000\000\015\000\015\000\
\015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\
\020\000\255\255\255\255\014\000\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\000\000\255\255\000\000\255\255\255\255\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\014\000\255\255\255\255\
\255\255\255\255\255\255\255\255\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\255\255\255\255\
\255\255\255\255\003\000\255\255\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\255\255\255\255\255\255\255\255\012\000\
\000\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\013\000\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\255\255\020\000\255\255\
\255\255\014\000\255\255\255\255\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\255\255\255\255\
\255\255\255\255\013\000\255\255\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\255\255\255\255\255\255\255\255\023\000\255\255\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\023\000\023\000\023\000\023\000\023\000\023\000\023\000\
\023\000\024\000\024\000\024\000\024\000\024\000\024\000\024\000\
\024\000\024\000\024\000\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\024\000\024\000\024\000\024\000\024\000\024\000\
\024\000\024\000\024\000\024\000\024\000\024\000\024\000\024\000\
\024\000\024\000\024\000\024\000\024\000\024\000\024\000\024\000\
\024\000\024\000\024\000\024\000\255\255\255\255\255\255\255\255\
\024\000\255\255\024\000\024\000\024\000\024\000\024\000\024\000\
\024\000\024\000\024\000\024\000\024\000\024\000\024\000\024\000\
\024\000\024\000\024\000\024\000\024\000\024\000\024\000\024\000\
\024\000\024\000\024\000\024\000\025\000\025\000\025\000\025\000\
\025\000\025\000\025\000\025\000\025\000\025\000\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\025\000\025\000\025\000\
\025\000\025\000\025\000\025\000\025\000\025\000\025\000\025\000\
\025\000\025\000\025\000\025\000\025\000\025\000\025\000\025\000\
\025\000\025\000\025\000\025\000\025\000\025\000\025\000\255\255\
\255\255\255\255\255\255\025\000\255\255\025\000\025\000\025\000\
\025\000\025\000\025\000\025\000\025\000\025\000\025\000\025\000\
\025\000\025\000\025\000\025\000\025\000\025\000\025\000\025\000\
\025\000\025\000\025\000\025\000\025\000\025\000\025\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\255\255\255\255\255\255\255\255\026\000\255\255\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\027\000\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\027\000\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\027\000\027\000\255\255\255\255\255\255\
\255\255\027\000\255\255\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\027\000\027\000\028\000\028\000\028\000\
\028\000\028\000\028\000\028\000\028\000\028\000\028\000\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\028\000\028\000\
\028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\
\028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\
\028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\
\255\255\255\255\255\255\255\255\028\000\255\255\028\000\028\000\
\028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\
\028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\
\028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\
\029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\
\029\000\029\000\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\029\000\029\000\029\000\029\000\029\000\029\000\029\000\
\029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\
\029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\
\029\000\029\000\029\000\255\255\255\255\255\255\255\255\029\000\
\255\255\029\000\029\000\029\000\029\000\029\000\029\000\029\000\
\029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\
\029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\
\029\000\029\000\029\000\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255";
Lexing.lex_base_code =
"";
Lexing.lex_backtrk_code =
"";
Lexing.lex_default_code =
"";
Lexing.lex_trans_code =
"";
Lexing.lex_check_code =
"";
Lexing.lex_code =
"";
}
let rec token lexbuf =
__ocaml_lex_token_rec lexbuf 0
and __ocaml_lex_token_rec lexbuf __ocaml_lex_state =
match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with
| 0 ->
# 17 "lexer.mll"
( Lexing.new_line lexbuf; token lexbuf )
# 305 "lexer.ml"
| 1 ->
# 19 "lexer.mll"
( token lexbuf )
# 310 "lexer.ml"
| 2 ->
let
# 20 "lexer.mll"
i
# 316 "lexer.ml"
= Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_start_pos lexbuf.Lexing.lex_curr_pos in
# 21 "lexer.mll"
( INT (int_of_string i) )
# 320 "lexer.ml"
| 3 ->
let
# 22 "lexer.mll"
s
# 326 "lexer.ml"
= Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_start_pos (lexbuf.Lexing.lex_curr_pos + -1) in
# 23 "lexer.mll"
( STRING s )
# 330 "lexer.ml"
| 4 ->
# 24 "lexer.mll"
( LET )
# 335 "lexer.ml"
| 5 ->
# 25 "lexer.mll"
( IN )
# 340 "lexer.ml"
| 6 ->
# 26 "lexer.mll"
( LIST )
# 345 "lexer.ml"
| 7 ->
# 27 "lexer.mll"
( LSQUARE )
# 350 "lexer.ml"
| 8 ->
# 28 "lexer.mll"
( RSQUARE )
# 355 "lexer.ml"
| 9 ->
# 29 "lexer.mll"
( SEMICOLON )
# 360 "lexer.ml"
| 10 ->
# 30 "lexer.mll"
( PLUS )
# 365 "lexer.ml"
| 11 ->
# 31 "lexer.mll"
( MINUS )
# 370 "lexer.ml"
| 12 ->
# 32 "lexer.mll"
( TIMES )
# 375 "lexer.ml"
| 13 ->
# 33 "lexer.mll"
( DIV )
# 380 "lexer.ml"
| 14 ->
# 34 "lexer.mll"
( EQUAL )
# 385 "lexer.ml"
| 15 ->
let
# 35 "lexer.mll"
v
# 391 "lexer.ml"
= Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_start_pos lexbuf.Lexing.lex_curr_pos in
# 36 "lexer.mll"
( NAME v )
# 395 "lexer.ml"
| 16 ->
# 37 "lexer.mll"
( EOF )
# 400 "lexer.ml"
| 17 ->
# 39 "lexer.mll"
( raise (Unexpected_character (Printf.sprintf "At offset %d: unexpected character.\n" (Lexing.lexeme_start lexbuf))) )
# 405 "lexer.ml"
| __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf;
__ocaml_lex_token_rec lexbuf __ocaml_lex_state
;;

View File

@@ -0,0 +1,39 @@
{
open Token
exception Error of string
exception Unexpected_character of string
}
(* This rule analyzes a single line and turns it into a stream of
tokens. *)
rule token = parse
(*
| "//" ([^ '\n']* ) (['\n' '\r']+)
{ Lexing.new_line lexbuf ; token lexbuf }
*)
| ('\r'? '\n' '\r'?)
{ Lexing.new_line lexbuf; token lexbuf }
| [' ' '\t']
{ token lexbuf }
| ['0'-'9']+ as i
{ INT (int_of_string i) }
| '"' ( [^ '"' '\\'] | ( '\\' [^ '"'] ) ) as s '"'
{ STRING s }
| "let" { LET }
| "in" { IN }
| "list" { LIST }
| "[" { LSQUARE }
| "]" { RSQUARE }
| ";" { SEMICOLON }
| "+" { PLUS }
| "-" { MINUS }
| "*" { TIMES }
| "/" { DIV }
| "=" { EQUAL }
| (['a'-'z']['a'-'z''A'-'Z''0'-'9''_']+) as v
{ NAME v }
| eof { EOF }
| _
{ raise (Unexpected_character (Printf.sprintf "At offset %d: unexpected character.\n" (Lexing.lexeme_start lexbuf))) }

View File

@@ -0,0 +1,18 @@
include Token_type
let to_string : token -> string = function
| TIMES -> "TIMES"
| STRING _ -> "STRING"
| NAME _ -> "NAME s"
| INT _ -> "INT n"
| SEMICOLON -> "SEMICOLON"
| RSQUARE -> "RSQUARE"
| PLUS -> "PLUS"
| MINUS -> "MINUS"
| LSQUARE -> "LSQUARE"
| LIST -> "LIST"
| LET -> "LET"
| IN -> "IN"
| EQUAL -> "EQUAL"
| EOF -> "EOF"
| DIV -> "DIV"

View File

@@ -0,0 +1,9 @@
%token EOF
%token <int> INT
%token <string> STRING
%token <string> NAME
%token LET IN EQUAL
%token PLUS MINUS TIMES DIV
%token LIST LSQUARE RSQUARE SEMICOLON
%%

View File

@@ -0,0 +1,17 @@
type token =
| TIMES
| STRING of (string)
| SEMICOLON
| RSQUARE
| PLUS
| NAME of (string)
| MINUS
| LSQUARE
| LIST
| LET
| INT of (int)
| IN
| EQUAL
| EOF
| DIV

View File

@@ -0,0 +1,19 @@
(* The type of tokens. *)
type token =
| TIMES
| STRING of (string)
| SEMICOLON
| RSQUARE
| PLUS
| NAME of (string)
| MINUS
| LSQUARE
| LIST
| LET
| INT of (int)
| IN
| EQUAL
| EOF
| DIV

View File

@@ -0,0 +1,39 @@
{
open Token
exception Error of string
exception Unexpected_character of string
}
(* This rule analyzes a single line and turns it into a stream of
tokens. *)
rule token = parse
(*
| "//" ([^ '\n']* ) (['\n' '\r']+)
{ Lexing.new_line lexbuf ; token lexbuf }
*)
| ('\r'? '\n' '\r'?)
{ Lexing.new_line lexbuf; token lexbuf }
| [' ' '\t']
{ token lexbuf }
| ['0'-'9']+ as i
{ INT (int_of_string i) }
| '"' ( [^ '"' '\\'] | ( '\\' [^ '"'] ) ) as s '"'
{ STRING s }
| (['a'-'z']['a'-'z''A'-'Z''0'-'9''_']+) as v
{ NAME v }
| "let" { LET }
| "in" { IN }
| "list" { LIST }
| "[" { LSQUARE }
| "]" { RSQUARE }
| ";" { SEMICOLON }
| "+" { PLUS }
| "-" { MINUS }
| "*" { TIMES }
| "/" { DIV }
| "=" { EQUAL }
| eof { EOF }
| _
{ raise (Unexpected_character (Printf.sprintf "At offset %d: unexpected character.\n" (Lexing.lexeme_start lexbuf))) }

View File

@@ -0,0 +1,31 @@
type file_location = {
filename : string ;
start_line : int ;
start_column : int ;
end_line : int ;
end_column : int ;
}
type virtual_location = string
type t =
| File of file_location
| Virtual of virtual_location
let make (start_pos:Lexing.position) (end_pos:Lexing.position) : t =
let filename = start_pos.pos_fname in
let start_line = start_pos.pos_lnum in
let end_line = end_pos.pos_lnum in
let start_column = start_pos.pos_cnum - start_pos.pos_bol in
let end_column = end_pos.pos_cnum - end_pos.pos_bol in
File { filename ; start_line ; start_column ; end_line ; end_column }
let virtual_location s = Virtual s
let dummy = virtual_location "dummy"
type 'a wrap = {
wrap_content : 'a ;
location : t ;
}
let wrap ~loc wrap_content = { wrap_content ; location = loc }
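
The generated menhir actions elsewhere in this commit use this module as "let loc = Location.make $startpos $endpos in Location.wrap ~loc ...". A small stand-alone sketch of the same pattern, outside any parser (the function name is illustrative, not from the commit):

let wrap_span (startp : Lexing.position) (endp : Lexing.position) (x : 'a) : 'a Location.wrap =
  let loc = Location.make startp endp in
  Location.wrap ~loc x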

View File

@@ -0,0 +1,874 @@
module MenhirBasics = struct
exception Error
type token = Lex.Token.token
end
include MenhirBasics
let _eRR =
MenhirBasics.Error
type _menhir_env = {
_menhir_lexer: Lexing.lexbuf -> token;
_menhir_lexbuf: Lexing.lexbuf;
_menhir_token: token;
mutable _menhir_error: bool
}
and _menhir_state =
| MenhirState30
| MenhirState27
| MenhirState25
| MenhirState22
| MenhirState20
| MenhirState17
| MenhirState15
| MenhirState11
| MenhirState5
| MenhirState0
# 1 "parser.mly"
open Ast
# 39 "parser.ml"
let rec _menhir_goto_arith_0 : _menhir_env -> 'ttv_tail -> Lexing.position -> _menhir_state -> 'tv_arith_0 -> 'ttv_return =
fun _menhir_env _menhir_stack _endpos _menhir_s _v ->
let _menhir_stack = (_menhir_stack, _endpos, _menhir_s, _v) in
match _menhir_s with
| MenhirState17 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ((('freshtv133 * _menhir_state * Lexing.position) * Lexing.position * _menhir_state * 'tv_variable * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_0) = Obj.magic _menhir_stack in
((assert (not _menhir_env._menhir_error);
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.IN ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ((('freshtv129 * _menhir_state * Lexing.position) * Lexing.position * _menhir_state * 'tv_variable * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_0) = Obj.magic _menhir_stack in
((let _menhir_env = _menhir_discard _menhir_env in
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.LET ->
_menhir_run15 _menhir_env (Obj.magic _menhir_stack) MenhirState30 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.LIST ->
_menhir_run2 _menhir_env (Obj.magic _menhir_stack) MenhirState30 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.NAME _v ->
_menhir_run1 _menhir_env (Obj.magic _menhir_stack) _menhir_env._menhir_lexbuf.Lexing.lex_curr_p MenhirState30 _v _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) MenhirState30) : 'freshtv130)
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ((('freshtv131 * _menhir_state * Lexing.position) * Lexing.position * _menhir_state * 'tv_variable * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_0) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv132)) : 'freshtv134)
| MenhirState30 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ((((('freshtv137 * _menhir_state * Lexing.position) * Lexing.position * _menhir_state * 'tv_variable * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_0)) * Lexing.position * _menhir_state * 'tv_arith_0) = Obj.magic _menhir_stack in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ((((('freshtv135 * _menhir_state * Lexing.position) * Lexing.position * _menhir_state * 'tv_variable * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_0)) * Lexing.position * _menhir_state * 'tv_arith_0) = Obj.magic _menhir_stack in
((let ((((_menhir_stack, _menhir_s, _startpos__1_), _endpos_a_, _, (a : 'tv_variable), _startpos_a_), _endpos_b_, _, (b : 'tv_arith_0)), _endpos_c_, _, (c : 'tv_arith_0)) = _menhir_stack in
let _5 = () in
let _3 = () in
let _1 = () in
let _endpos = _endpos_c_ in
let _v : 'tv_arith_0 = let _endpos = _endpos_c_ in
let _startpos = _startpos__1_ in
# 95 "parser.mly"
(
let loc = Location.make _startpos _endpos in
Location.wrap ~loc @@ Let_in (a , b , c)
)
# 92 "parser.ml"
in
_menhir_goto_arith_0 _menhir_env _menhir_stack _endpos _menhir_s _v) : 'freshtv136)) : 'freshtv138)
| MenhirState0 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv151 * Lexing.position * _menhir_state * 'tv_arith_0) = Obj.magic _menhir_stack in
((assert (not _menhir_env._menhir_error);
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.EOF ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv147 * Lexing.position * _menhir_state * 'tv_arith_0) = Obj.magic _menhir_stack in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv145 * Lexing.position * _menhir_state * 'tv_arith_0) = Obj.magic _menhir_stack in
((let (_menhir_stack, _endpos__1_, _menhir_s, (_1 : 'tv_arith_0)) = _menhir_stack in
let _2 = () in
let _v : (
# 5 "parser.mly"
(Ast.entry_point Location.wrap)
# 111 "parser.ml"
) = let _1 =
# 47 "parser.mly"
( _1 )
# 115 "parser.ml"
in
# 33 "parser.mly"
( _1 )
# 120 "parser.ml"
in
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv143) = _menhir_stack in
let (_menhir_s : _menhir_state) = _menhir_s in
let (_v : (
# 5 "parser.mly"
(Ast.entry_point Location.wrap)
# 128 "parser.ml"
)) = _v in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv141) = Obj.magic _menhir_stack in
let (_menhir_s : _menhir_state) = _menhir_s in
let (_v : (
# 5 "parser.mly"
(Ast.entry_point Location.wrap)
# 136 "parser.ml"
)) = _v in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv139) = Obj.magic _menhir_stack in
let (_menhir_s : _menhir_state) = _menhir_s in
let ((_1 : (
# 5 "parser.mly"
(Ast.entry_point Location.wrap)
# 144 "parser.ml"
)) : (
# 5 "parser.mly"
(Ast.entry_point Location.wrap)
# 148 "parser.ml"
)) = _v in
(Obj.magic _1 : 'freshtv140)) : 'freshtv142)) : 'freshtv144)) : 'freshtv146)) : 'freshtv148)
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv149 * Lexing.position * _menhir_state * 'tv_arith_0) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv150)) : 'freshtv152)
| _ ->
_menhir_fail ()
and _menhir_goto_arith_1 : _menhir_env -> 'ttv_tail -> Lexing.position -> _menhir_state -> 'tv_arith_1 -> Lexing.position -> 'ttv_return =
fun _menhir_env _menhir_stack _endpos _menhir_s _v _startpos ->
let _menhir_stack = (_menhir_stack, _endpos, _menhir_s, _v, _startpos) in
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv127 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position) = Obj.magic _menhir_stack in
((assert (not _menhir_env._menhir_error);
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.MINUS ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv119 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position) = Obj.magic _menhir_stack in
((let _menhir_env = _menhir_discard _menhir_env in
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.LIST ->
_menhir_run2 _menhir_env (Obj.magic _menhir_stack) MenhirState27 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.NAME _v ->
_menhir_run1 _menhir_env (Obj.magic _menhir_stack) _menhir_env._menhir_lexbuf.Lexing.lex_curr_p MenhirState27 _v _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) MenhirState27) : 'freshtv120)
| Lex.Token.PLUS ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv121 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position) = Obj.magic _menhir_stack in
((let _menhir_env = _menhir_discard _menhir_env in
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.LIST ->
_menhir_run2 _menhir_env (Obj.magic _menhir_stack) MenhirState25 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.NAME _v ->
_menhir_run1 _menhir_env (Obj.magic _menhir_stack) _menhir_env._menhir_lexbuf.Lexing.lex_curr_p MenhirState25 _v _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) MenhirState25) : 'freshtv122)
| Lex.Token.EOF | Lex.Token.IN ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv123 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position) = Obj.magic _menhir_stack in
((let (_menhir_stack, _endpos__1_, _menhir_s, (_1 : 'tv_arith_1), _startpos__1_) = _menhir_stack in
let _endpos = _endpos__1_ in
let _v : 'tv_arith_0 =
# 99 "parser.mly"
( _1 )
# 205 "parser.ml"
in
_menhir_goto_arith_0 _menhir_env _menhir_stack _endpos _menhir_s _v) : 'freshtv124)
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv125 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv126)) : 'freshtv128)
and _menhir_run20 : _menhir_env -> 'ttv_tail * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position -> 'ttv_return =
fun _menhir_env _menhir_stack ->
let _menhir_env = _menhir_discard _menhir_env in
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.LIST ->
_menhir_run2 _menhir_env (Obj.magic _menhir_stack) MenhirState20 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.NAME _v ->
_menhir_run1 _menhir_env (Obj.magic _menhir_stack) _menhir_env._menhir_lexbuf.Lexing.lex_curr_p MenhirState20 _v _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) MenhirState20
and _menhir_run22 : _menhir_env -> 'ttv_tail * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position -> 'ttv_return =
fun _menhir_env _menhir_stack ->
let _menhir_env = _menhir_discard _menhir_env in
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.LIST ->
_menhir_run2 _menhir_env (Obj.magic _menhir_stack) MenhirState22 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.NAME _v ->
_menhir_run1 _menhir_env (Obj.magic _menhir_stack) _menhir_env._menhir_lexbuf.Lexing.lex_curr_p MenhirState22 _v _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) MenhirState22
and _menhir_fail : unit -> 'a =
fun () ->
Printf.fprintf stderr "Internal failure -- please contact the parser generator's developers.\n%!";
assert false
and _menhir_goto_arith_2 : _menhir_env -> 'ttv_tail -> Lexing.position -> _menhir_state -> 'tv_arith_2 -> Lexing.position -> 'ttv_return =
fun _menhir_env _menhir_stack _endpos _menhir_s _v _startpos ->
let _menhir_stack = (_menhir_stack, _endpos, _menhir_s, _v, _startpos) in
match _menhir_s with
| MenhirState0 | MenhirState30 | MenhirState17 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv105 * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position) = Obj.magic _menhir_stack in
((assert (not _menhir_env._menhir_error);
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.DIV ->
_menhir_run22 _menhir_env (Obj.magic _menhir_stack)
| Lex.Token.TIMES ->
_menhir_run20 _menhir_env (Obj.magic _menhir_stack)
| Lex.Token.EOF | Lex.Token.IN | Lex.Token.MINUS | Lex.Token.PLUS ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv101 * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position) = Obj.magic _menhir_stack in
((let (_menhir_stack, _endpos__1_, _menhir_s, (_1 : 'tv_arith_2), _startpos__1_) = _menhir_stack in
let _startpos = _startpos__1_ in
let _endpos = _endpos__1_ in
let _v : 'tv_arith_1 =
# 91 "parser.mly"
( _1 )
# 272 "parser.ml"
in
_menhir_goto_arith_1 _menhir_env _menhir_stack _endpos _menhir_s _v _startpos) : 'freshtv102)
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv103 * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv104)) : 'freshtv106)
| MenhirState25 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (('freshtv111 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position) = Obj.magic _menhir_stack in
((assert (not _menhir_env._menhir_error);
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.DIV ->
_menhir_run22 _menhir_env (Obj.magic _menhir_stack)
| Lex.Token.TIMES ->
_menhir_run20 _menhir_env (Obj.magic _menhir_stack)
| Lex.Token.EOF | Lex.Token.IN | Lex.Token.MINUS | Lex.Token.PLUS ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (('freshtv107 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position) = Obj.magic _menhir_stack in
((let ((_menhir_stack, _endpos_a_, _menhir_s, (a : 'tv_arith_1), _startpos_a_), _endpos_b_, _, (b : 'tv_arith_2), _startpos_b_) = _menhir_stack in
let _2 = () in
let _startpos = _startpos_a_ in
let _endpos = _endpos_b_ in
let _v : 'tv_arith_1 = let _endpos = _endpos_b_ in
let _startpos = _startpos_a_ in
# 81 "parser.mly"
(
let loc = Location.make _startpos _endpos in
Location.wrap ~loc @@ Addition (a , b)
)
# 307 "parser.ml"
in
_menhir_goto_arith_1 _menhir_env _menhir_stack _endpos _menhir_s _v _startpos) : 'freshtv108)
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (('freshtv109 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv110)) : 'freshtv112)
| MenhirState27 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (('freshtv117 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position) = Obj.magic _menhir_stack in
((assert (not _menhir_env._menhir_error);
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.DIV ->
_menhir_run22 _menhir_env (Obj.magic _menhir_stack)
| Lex.Token.TIMES ->
_menhir_run20 _menhir_env (Obj.magic _menhir_stack)
| Lex.Token.EOF | Lex.Token.IN | Lex.Token.MINUS | Lex.Token.PLUS ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (('freshtv113 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position) = Obj.magic _menhir_stack in
((let ((_menhir_stack, _endpos_a_, _menhir_s, (a : 'tv_arith_1), _startpos_a_), _endpos_b_, _, (b : 'tv_arith_2), _startpos_b_) = _menhir_stack in
let _2 = () in
let _startpos = _startpos_a_ in
let _endpos = _endpos_b_ in
let _v : 'tv_arith_1 = let _endpos = _endpos_b_ in
let _startpos = _startpos_a_ in
# 87 "parser.mly"
(
let loc = Location.make _startpos _endpos in
Location.wrap ~loc @@ Substraction (a , b)
)
# 342 "parser.ml"
in
_menhir_goto_arith_1 _menhir_env _menhir_stack _endpos _menhir_s _v _startpos) : 'freshtv114)
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (('freshtv115 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv116)) : 'freshtv118)
| _ ->
_menhir_fail ()
and _menhir_goto_lead_list_content_SEMICOLON_arith_3_ : _menhir_env -> 'ttv_tail -> 'tv_lead_list_content_SEMICOLON_arith_3_ -> 'ttv_return =
fun _menhir_env _menhir_stack _v ->
let _menhir_stack = (_menhir_stack, _v) in
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv99 * 'tv_lead_list_content_SEMICOLON_arith_3_) = Obj.magic _menhir_stack in
((assert (not _menhir_env._menhir_error);
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.SEMICOLON ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv93 * 'tv_lead_list_content_SEMICOLON_arith_3_) = Obj.magic _menhir_stack in
((let _menhir_env = _menhir_discard _menhir_env in
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.LIST ->
_menhir_run2 _menhir_env (Obj.magic _menhir_stack) MenhirState11 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.NAME _v ->
_menhir_run1 _menhir_env (Obj.magic _menhir_stack) _menhir_env._menhir_lexbuf.Lexing.lex_curr_p MenhirState11 _v _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) MenhirState11) : 'freshtv94)
| Lex.Token.RSQUARE ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv95 * 'tv_lead_list_content_SEMICOLON_arith_3_) = Obj.magic _menhir_stack in
((let (_menhir_stack, (_1 : 'tv_lead_list_content_SEMICOLON_arith_3_)) = _menhir_stack in
let _v : 'tv_lead_list_SEMICOLON_arith_3_ =
# 22 "parser.mly"
( _1 )
# 384 "parser.ml"
in
_menhir_goto_lead_list_SEMICOLON_arith_3_ _menhir_env _menhir_stack _v) : 'freshtv96)
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv97 * 'tv_lead_list_content_SEMICOLON_arith_3_) = Obj.magic _menhir_stack in
(raise _eRR : 'freshtv98)) : 'freshtv100)
and _menhir_goto_arith_3 : _menhir_env -> 'ttv_tail -> Lexing.position -> _menhir_state -> 'tv_arith_3 -> Lexing.position -> 'ttv_return =
fun _menhir_env _menhir_stack _endpos _menhir_s _v _startpos ->
match _menhir_s with
| MenhirState5 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv75 * 'tv_option_SEMICOLON_) = Obj.magic _menhir_stack in
let (_endpos : Lexing.position) = _endpos in
let (_menhir_s : _menhir_state) = _menhir_s in
let (_v : 'tv_arith_3) = _v in
let (_startpos : Lexing.position) = _startpos in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv73 * 'tv_option_SEMICOLON_) = Obj.magic _menhir_stack in
let (_endpos_x_ : Lexing.position) = _endpos in
let (_ : _menhir_state) = _menhir_s in
let ((x : 'tv_arith_3) : 'tv_arith_3) = _v in
let (_startpos_x_ : Lexing.position) = _startpos in
((let (_menhir_stack, (_1 : 'tv_option_SEMICOLON_)) = _menhir_stack in
let _v : 'tv_lead_list_first_SEMICOLON_arith_3_ =
# 29 "parser.mly"
( [ x ] )
# 414 "parser.ml"
in
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv71) = _menhir_stack in
let (_v : 'tv_lead_list_first_SEMICOLON_arith_3_) = _v in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv69) = Obj.magic _menhir_stack in
let (_v : 'tv_lead_list_first_SEMICOLON_arith_3_) = _v in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv67) = Obj.magic _menhir_stack in
let ((x : 'tv_lead_list_first_SEMICOLON_arith_3_) : 'tv_lead_list_first_SEMICOLON_arith_3_) = _v in
((let _v : 'tv_lead_list_content_SEMICOLON_arith_3_ =
# 25 "parser.mly"
( x )
# 428 "parser.ml"
in
_menhir_goto_lead_list_content_SEMICOLON_arith_3_ _menhir_env _menhir_stack _v) : 'freshtv68)) : 'freshtv70)) : 'freshtv72)) : 'freshtv74)) : 'freshtv76)
| MenhirState11 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv79 * 'tv_lead_list_content_SEMICOLON_arith_3_)) = Obj.magic _menhir_stack in
let (_endpos : Lexing.position) = _endpos in
let (_menhir_s : _menhir_state) = _menhir_s in
let (_v : 'tv_arith_3) = _v in
let (_startpos : Lexing.position) = _startpos in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv77 * 'tv_lead_list_content_SEMICOLON_arith_3_)) = Obj.magic _menhir_stack in
let (_endpos_x_ : Lexing.position) = _endpos in
let (_ : _menhir_state) = _menhir_s in
let ((x : 'tv_arith_3) : 'tv_arith_3) = _v in
let (_startpos_x_ : Lexing.position) = _startpos in
((let (_menhir_stack, (xs : 'tv_lead_list_content_SEMICOLON_arith_3_)) = _menhir_stack in
let _2 = () in
let _v : 'tv_lead_list_content_SEMICOLON_arith_3_ =
# 26 "parser.mly"
( xs @ [ x ] )
# 449 "parser.ml"
in
_menhir_goto_lead_list_content_SEMICOLON_arith_3_ _menhir_env _menhir_stack _v) : 'freshtv78)) : 'freshtv80)
| MenhirState0 | MenhirState30 | MenhirState27 | MenhirState25 | MenhirState17 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv83) = Obj.magic _menhir_stack in
let (_endpos : Lexing.position) = _endpos in
let (_menhir_s : _menhir_state) = _menhir_s in
let (_v : 'tv_arith_3) = _v in
let (_startpos : Lexing.position) = _startpos in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv81) = Obj.magic _menhir_stack in
let (_endpos__1_ : Lexing.position) = _endpos in
let (_menhir_s : _menhir_state) = _menhir_s in
let ((_1 : 'tv_arith_3) : 'tv_arith_3) = _v in
let (_startpos__1_ : Lexing.position) = _startpos in
((let _startpos = _startpos__1_ in
let _endpos = _endpos__1_ in
let _v : 'tv_arith_2 =
# 77 "parser.mly"
( _1 )
# 470 "parser.ml"
in
_menhir_goto_arith_2 _menhir_env _menhir_stack _endpos _menhir_s _v _startpos) : 'freshtv82)) : 'freshtv84)
| MenhirState20 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv87 * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position)) = Obj.magic _menhir_stack in
let (_endpos : Lexing.position) = _endpos in
let (_menhir_s : _menhir_state) = _menhir_s in
let (_v : 'tv_arith_3) = _v in
let (_startpos : Lexing.position) = _startpos in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv85 * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position)) = Obj.magic _menhir_stack in
let (_endpos_b_ : Lexing.position) = _endpos in
let (_ : _menhir_state) = _menhir_s in
let ((b : 'tv_arith_3) : 'tv_arith_3) = _v in
let (_startpos_b_ : Lexing.position) = _startpos in
((let (_menhir_stack, _endpos_a_, _menhir_s, (a : 'tv_arith_2), _startpos_a_) = _menhir_stack in
let _2 = () in
let _startpos = _startpos_a_ in
let _endpos = _endpos_b_ in
let _v : 'tv_arith_2 = let _endpos = _endpos_b_ in
let _startpos = _startpos_a_ in
# 67 "parser.mly"
(
let loc = Location.make _startpos _endpos in
Location.wrap ~loc @@ Multiplication (a , b)
)
# 498 "parser.ml"
in
_menhir_goto_arith_2 _menhir_env _menhir_stack _endpos _menhir_s _v _startpos) : 'freshtv86)) : 'freshtv88)
| MenhirState22 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv91 * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position)) = Obj.magic _menhir_stack in
let (_endpos : Lexing.position) = _endpos in
let (_menhir_s : _menhir_state) = _menhir_s in
let (_v : 'tv_arith_3) = _v in
let (_startpos : Lexing.position) = _startpos in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv89 * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position)) = Obj.magic _menhir_stack in
let (_endpos_b_ : Lexing.position) = _endpos in
let (_ : _menhir_state) = _menhir_s in
let ((b : 'tv_arith_3) : 'tv_arith_3) = _v in
let (_startpos_b_ : Lexing.position) = _startpos in
((let (_menhir_stack, _endpos_a_, _menhir_s, (a : 'tv_arith_2), _startpos_a_) = _menhir_stack in
let _2 = () in
let _startpos = _startpos_a_ in
let _endpos = _endpos_b_ in
let _v : 'tv_arith_2 = let _endpos = _endpos_b_ in
let _startpos = _startpos_a_ in
# 73 "parser.mly"
(
let loc = Location.make _startpos _endpos in
Location.wrap ~loc @@ Division (a , b)
)
# 526 "parser.ml"
in
_menhir_goto_arith_2 _menhir_env _menhir_stack _endpos _menhir_s _v _startpos) : 'freshtv90)) : 'freshtv92)
| _ ->
_menhir_fail ()
and _menhir_goto_lead_list_SEMICOLON_arith_3_ : _menhir_env -> 'ttv_tail -> 'tv_lead_list_SEMICOLON_arith_3_ -> 'ttv_return =
fun _menhir_env _menhir_stack _v ->
let _menhir_stack = (_menhir_stack, _v) in
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (('freshtv65 * _menhir_state * Lexing.position)) * 'tv_lead_list_SEMICOLON_arith_3_) = Obj.magic _menhir_stack in
((assert (not _menhir_env._menhir_error);
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.RSQUARE ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (('freshtv61 * _menhir_state * Lexing.position)) * 'tv_lead_list_SEMICOLON_arith_3_) = Obj.magic _menhir_stack in
let (_endpos : Lexing.position) = _menhir_env._menhir_lexbuf.Lexing.lex_curr_p in
((let _menhir_env = _menhir_discard _menhir_env in
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (('freshtv59 * _menhir_state * Lexing.position)) * 'tv_lead_list_SEMICOLON_arith_3_) = Obj.magic _menhir_stack in
let (_endpos__4_ : Lexing.position) = _endpos in
((let ((_menhir_stack, _menhir_s, _startpos__1_), (a : 'tv_lead_list_SEMICOLON_arith_3_)) = _menhir_stack in
let _4 = () in
let _2 = () in
let _1 = () in
let _startpos = _startpos__1_ in
let _endpos = _endpos__4_ in
let _v : 'tv_arith_3 = let _endpos = _endpos__4_ in
let _startpos = _startpos__1_ in
# 59 "parser.mly"
(
let loc = Location.make _startpos _endpos in
Location.wrap ~loc @@ List (a)
)
# 562 "parser.ml"
in
_menhir_goto_arith_3 _menhir_env _menhir_stack _endpos _menhir_s _v _startpos) : 'freshtv60)) : 'freshtv62)
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (('freshtv63 * _menhir_state * Lexing.position)) * 'tv_lead_list_SEMICOLON_arith_3_) = Obj.magic _menhir_stack in
((let ((_menhir_stack, _menhir_s, _), _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv64)) : 'freshtv66)
and _menhir_goto_option_SEMICOLON_ : _menhir_env -> 'ttv_tail -> 'tv_option_SEMICOLON_ -> 'ttv_return =
fun _menhir_env _menhir_stack _v ->
let _menhir_stack = (_menhir_stack, _v) in
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv57 * 'tv_option_SEMICOLON_) = Obj.magic _menhir_stack in
((assert (not _menhir_env._menhir_error);
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.LIST ->
_menhir_run2 _menhir_env (Obj.magic _menhir_stack) MenhirState5 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.NAME _v ->
_menhir_run1 _menhir_env (Obj.magic _menhir_stack) _menhir_env._menhir_lexbuf.Lexing.lex_curr_p MenhirState5 _v _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) MenhirState5) : 'freshtv58)
and _menhir_errorcase : _menhir_env -> 'ttv_tail -> _menhir_state -> 'ttv_return =
fun _menhir_env _menhir_stack _menhir_s ->
match _menhir_s with
| MenhirState30 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (((('freshtv37 * _menhir_state * Lexing.position) * Lexing.position * _menhir_state * 'tv_variable * Lexing.position)) * Lexing.position * _menhir_state * 'tv_arith_0)) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv38)
| MenhirState27 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv39 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position)) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv40)
| MenhirState25 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv41 * Lexing.position * _menhir_state * 'tv_arith_1 * Lexing.position)) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv42)
| MenhirState22 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv43 * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position)) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv44)
| MenhirState20 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv45 * Lexing.position * _menhir_state * 'tv_arith_2 * Lexing.position)) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv46)
| MenhirState17 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : (('freshtv47 * _menhir_state * Lexing.position) * Lexing.position * _menhir_state * 'tv_variable * Lexing.position)) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv48)
| MenhirState15 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv49 * _menhir_state * Lexing.position) = Obj.magic _menhir_stack in
((let (_menhir_stack, _menhir_s, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv50)
| MenhirState11 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv51 * 'tv_lead_list_content_SEMICOLON_arith_3_)) = Obj.magic _menhir_stack in
(raise _eRR : 'freshtv52)
| MenhirState5 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv53 * 'tv_option_SEMICOLON_) = Obj.magic _menhir_stack in
(raise _eRR : 'freshtv54)
| MenhirState0 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv55) = Obj.magic _menhir_stack in
(raise _eRR : 'freshtv56)
and _menhir_run1 : _menhir_env -> 'ttv_tail -> Lexing.position -> _menhir_state -> (
# 4 "lex/token.mly"
(string)
# 644 "parser.ml"
) -> Lexing.position -> 'ttv_return =
fun _menhir_env _menhir_stack _endpos _menhir_s _v _startpos ->
let _menhir_env = _menhir_discard _menhir_env in
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv35) = Obj.magic _menhir_stack in
let (_endpos__1_ : Lexing.position) = _endpos in
let (_menhir_s : _menhir_state) = _menhir_s in
let ((_1 : (
# 4 "lex/token.mly"
(string)
# 655 "parser.ml"
)) : (
# 4 "lex/token.mly"
(string)
# 659 "parser.ml"
)) = _v in
let (_startpos__1_ : Lexing.position) = _startpos in
((let _startpos = _startpos__1_ in
let _endpos = _endpos__1_ in
let _v : 'tv_variable = let _endpos = _endpos__1_ in
let _startpos = _startpos__1_ in
# 38 "parser.mly"
(
let loc = Location.make _startpos _endpos in
Location.wrap ~loc _1
)
# 672 "parser.ml"
in
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv33) = _menhir_stack in
let (_endpos : Lexing.position) = _endpos in
let (_menhir_s : _menhir_state) = _menhir_s in
let (_v : 'tv_variable) = _v in
let (_startpos : Lexing.position) = _startpos in
((let _menhir_stack = (_menhir_stack, _endpos, _menhir_s, _v, _startpos) in
match _menhir_s with
| MenhirState0 | MenhirState30 | MenhirState27 | MenhirState25 | MenhirState22 | MenhirState20 | MenhirState17 | MenhirState11 | MenhirState5 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv25 * Lexing.position * _menhir_state * 'tv_variable * Lexing.position) = Obj.magic _menhir_stack in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv23 * Lexing.position * _menhir_state * 'tv_variable * Lexing.position) = Obj.magic _menhir_stack in
((let (_menhir_stack, _endpos_a_, _menhir_s, (a : 'tv_variable), _startpos_a_) = _menhir_stack in
let _startpos = _startpos_a_ in
let _endpos = _endpos_a_ in
let _v : 'tv_arith_4 = let _endpos = _endpos_a_ in
let _startpos = _startpos_a_ in
# 52 "parser.mly"
(
let loc = Location.make _startpos _endpos in
Location.wrap ~loc @@ Arith_variable (a)
)
# 698 "parser.ml"
in
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv21) = _menhir_stack in
let (_endpos : Lexing.position) = _endpos in
let (_menhir_s : _menhir_state) = _menhir_s in
let (_v : 'tv_arith_4) = _v in
let (_startpos : Lexing.position) = _startpos in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv19) = Obj.magic _menhir_stack in
let (_endpos : Lexing.position) = _endpos in
let (_menhir_s : _menhir_state) = _menhir_s in
let (_v : 'tv_arith_4) = _v in
let (_startpos : Lexing.position) = _startpos in
((let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv17) = Obj.magic _menhir_stack in
let (_endpos__1_ : Lexing.position) = _endpos in
let (_menhir_s : _menhir_state) = _menhir_s in
let ((_1 : 'tv_arith_4) : 'tv_arith_4) = _v in
let (_startpos__1_ : Lexing.position) = _startpos in
((let _startpos = _startpos__1_ in
let _endpos = _endpos__1_ in
let _v : 'tv_arith_3 =
# 63 "parser.mly"
( _1 )
# 723 "parser.ml"
in
_menhir_goto_arith_3 _menhir_env _menhir_stack _endpos _menhir_s _v _startpos) : 'freshtv18)) : 'freshtv20)) : 'freshtv22)) : 'freshtv24)) : 'freshtv26)
| MenhirState15 ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv31 * _menhir_state * Lexing.position) * Lexing.position * _menhir_state * 'tv_variable * Lexing.position) = Obj.magic _menhir_stack in
((assert (not _menhir_env._menhir_error);
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.EQUAL ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv27 * _menhir_state * Lexing.position) * Lexing.position * _menhir_state * 'tv_variable * Lexing.position) = Obj.magic _menhir_stack in
((let _menhir_env = _menhir_discard _menhir_env in
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.LET ->
_menhir_run15 _menhir_env (Obj.magic _menhir_stack) MenhirState17 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.LIST ->
_menhir_run2 _menhir_env (Obj.magic _menhir_stack) MenhirState17 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.NAME _v ->
_menhir_run1 _menhir_env (Obj.magic _menhir_stack) _menhir_env._menhir_lexbuf.Lexing.lex_curr_p MenhirState17 _v _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) MenhirState17) : 'freshtv28)
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv29 * _menhir_state * Lexing.position) * Lexing.position * _menhir_state * 'tv_variable * Lexing.position) = Obj.magic _menhir_stack in
((let (_menhir_stack, _, _menhir_s, _, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv30)) : 'freshtv32)) : 'freshtv34)) : 'freshtv36)
and _menhir_run2 : _menhir_env -> 'ttv_tail -> _menhir_state -> Lexing.position -> 'ttv_return =
fun _menhir_env _menhir_stack _menhir_s _startpos ->
let _menhir_stack = (_menhir_stack, _menhir_s, _startpos) in
let _menhir_env = _menhir_discard _menhir_env in
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.LSQUARE ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv13 * _menhir_state * Lexing.position) = Obj.magic _menhir_stack in
((let _menhir_env = _menhir_discard _menhir_env in
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.SEMICOLON ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv5) = Obj.magic _menhir_stack in
((let _menhir_env = _menhir_discard _menhir_env in
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv3) = Obj.magic _menhir_stack in
((let x = () in
let _v : 'tv_option_SEMICOLON_ =
# 116 "/home/user/.opam/tezos/lib/menhir/standard.mly"
( Some x )
# 778 "parser.ml"
in
_menhir_goto_option_SEMICOLON_ _menhir_env _menhir_stack _v) : 'freshtv4)) : 'freshtv6)
| Lex.Token.LIST | Lex.Token.NAME _ ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv7) = Obj.magic _menhir_stack in
((let _v : 'tv_option_SEMICOLON_ =
# 114 "/home/user/.opam/tezos/lib/menhir/standard.mly"
( None )
# 787 "parser.ml"
in
_menhir_goto_option_SEMICOLON_ _menhir_env _menhir_stack _v) : 'freshtv8)
| Lex.Token.RSQUARE ->
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv9) = Obj.magic _menhir_stack in
((let _v : 'tv_lead_list_SEMICOLON_arith_3_ =
# 21 "parser.mly"
( [] )
# 796 "parser.ml"
in
_menhir_goto_lead_list_SEMICOLON_arith_3_ _menhir_env _menhir_stack _v) : 'freshtv10)
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : ('freshtv11 * _menhir_state * Lexing.position)) = Obj.magic _menhir_stack in
((let (_menhir_stack, _menhir_s, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv12)) : 'freshtv14)
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv15 * _menhir_state * Lexing.position) = Obj.magic _menhir_stack in
((let (_menhir_stack, _menhir_s, _) = _menhir_stack in
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) _menhir_s) : 'freshtv16)
and _menhir_run15 : _menhir_env -> 'ttv_tail -> _menhir_state -> Lexing.position -> 'ttv_return =
fun _menhir_env _menhir_stack _menhir_s _startpos ->
let _menhir_stack = (_menhir_stack, _menhir_s, _startpos) in
let _menhir_env = _menhir_discard _menhir_env in
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.NAME _v ->
_menhir_run1 _menhir_env (Obj.magic _menhir_stack) _menhir_env._menhir_lexbuf.Lexing.lex_curr_p MenhirState15 _v _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) MenhirState15
and _menhir_discard : _menhir_env -> _menhir_env =
fun _menhir_env ->
let lexer = _menhir_env._menhir_lexer in
let lexbuf = _menhir_env._menhir_lexbuf in
let _tok = lexer lexbuf in
{
_menhir_lexer = lexer;
_menhir_lexbuf = lexbuf;
_menhir_token = _tok;
_menhir_error = false;
}
and entry_point : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> (
# 5 "parser.mly"
(Ast.entry_point Location.wrap)
# 842 "parser.ml"
) =
fun lexer lexbuf ->
let _menhir_env =
let (lexer : Lexing.lexbuf -> token) = lexer in
let (lexbuf : Lexing.lexbuf) = lexbuf in
((let _tok = Obj.magic () in
{
_menhir_lexer = lexer;
_menhir_lexbuf = lexbuf;
_menhir_token = _tok;
_menhir_error = false;
}) : _menhir_env)
in
Obj.magic (let (_menhir_env : _menhir_env) = _menhir_env in
let (_menhir_stack : 'freshtv1) = ((), _menhir_env._menhir_lexbuf.Lexing.lex_curr_p) in
((let _menhir_env = _menhir_discard _menhir_env in
let _tok = _menhir_env._menhir_token in
match _tok with
| Lex.Token.LET ->
_menhir_run15 _menhir_env (Obj.magic _menhir_stack) MenhirState0 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.LIST ->
_menhir_run2 _menhir_env (Obj.magic _menhir_stack) MenhirState0 _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| Lex.Token.NAME _v ->
_menhir_run1 _menhir_env (Obj.magic _menhir_stack) _menhir_env._menhir_lexbuf.Lexing.lex_curr_p MenhirState0 _v _menhir_env._menhir_lexbuf.Lexing.lex_start_p
| _ ->
assert (not _menhir_env._menhir_error);
_menhir_env._menhir_error <- true;
_menhir_errorcase _menhir_env (Obj.magic _menhir_stack) MenhirState0) : 'freshtv2))
# 269 "/home/user/.opam/tezos/lib/menhir/standard.mly"
# 875 "parser.ml"

View File

@ -0,0 +1,12 @@
(* The type of tokens. *)
type token = Lex.Token.token
(* This exception is raised by the monolithic API functions. *)
exception Error
(* The monolithic API. *)
val entry_point: (Lexing.lexbuf -> token) -> Lexing.lexbuf -> (Ast.entry_point Location.wrap)
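A minimal sketch of calling this monolithic API from OCaml (the `parse_string` wrapper is hypothetical; only `Parser.entry_point`, `Parser.Error` and `Lex.Lexer.token` are names introduced by this commit):

(* Sketch: run the generated parser over an in-memory string. *)
let parse_string (source : string) : Ast.entry_point Location.wrap option =
  let lexbuf = Lexing.from_string source in
  try Some (Parser.entry_point Lex.Lexer.token lexbuf)
  with Parser.Error -> None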

View File

@ -0,0 +1,99 @@
%{
open Ast
%}
%start <Ast.entry_point Location.wrap> entry_point
%%
trail_list(separator, X):
| { [] }
| trail_list_content(separator, X) { $1 }
trail_list_content(separator, X):
| x = trail_list_last(separator, X) { x }
| x = X separator xs = trail_list_content(separator, X) { x :: xs }
trail_list_last(separator, X):
| x = X option(separator) { [ x ] }
lead_list(separator, X):
| { [] }
| lead_list_content(separator, X) { $1 }
lead_list_content(separator, X):
| x = lead_list_first(separator, X) { x }
| xs = lead_list_content(separator, X) separator x = X { xs @ [ x ] }
lead_list_first (separator, X):
| option(separator) x = X { [ x ] }
(* Full Grammar *)
(* Generated Language *)
entry_point : arith EOF { $1 }
(* Singletons *)
variable : NAME
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc $1
}
(* Hierarchies *)
(* Top-level for arith *)
%inline arith : arith_0 { $1 }
arith_4 :
(* Arith_variable *)
| a = variable
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Arith_variable (a)
}
arith_3 :
(* List *)
| LIST LSQUARE a = lead_list(SEMICOLON, arith_3) RSQUARE
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ List (a)
}
| arith_4 { $1 }
arith_2 :
(* Multiplication *)
| a = arith_2 TIMES b = arith_3
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Multiplication (a , b)
}
(* Division *)
| a = arith_2 DIV b = arith_3
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Division (a , b)
}
| arith_3 { $1 }
arith_1 :
(* Addition *)
| a = arith_1 PLUS b = arith_2
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Addition (a , b)
}
(* Substraction *)
| a = arith_1 MINUS b = arith_2
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Substraction (a , b)
}
| arith_2 { $1 }
arith_0 :
(* Let_in *)
| LET a = variable EQUAL b = arith_0 IN c = arith_0
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Let_in (a , b , c)
}
| arith_1 { $1 }
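For orientation, a tiny input this grammar accepts (illustrative only, not a file in the commit); note that list elements are arith_3, so only variables and nested lists can appear between the brackets:

let x = a * b in list [ x ; y ]

This parses to roughly `Let_in (x, Multiplication (a, b), List [x; y])`, with every node (and every variable) wrapped by `Location.wrap` carrying its source span.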

View File

@ -0,0 +1,70 @@
(* Full Grammar *)
(* Generated Language *)
entry_point : arith EOF { $1 }
(* Singletons *)
variable : NAME
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc $1
}
(* Hierarchies *)
(* Top-level for arith *)
%inline arith : arith_0 { $1 }
arith_4 :
(* Arith_variable *)
| a = variable
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Arith_variable (a)
}
arith_3 :
(* List *)
| LIST LSQUARE a = lead_list(SEMICOLON, arith_3) RSQUARE
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ List (a)
}
| arith_4 { $1 }
arith_2 :
(* Multiplication *)
| a = arith_2 TIMES b = arith_3
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Multiplication (a , b)
}
(* Division *)
| a = arith_2 DIV b = arith_3
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Division (a , b)
}
| arith_3 { $1 }
arith_1 :
(* Addition *)
| a = arith_1 PLUS b = arith_2
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Addition (a , b)
}
(* Substraction *)
| a = arith_1 MINUS b = arith_2
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Substraction (a , b)
}
| arith_2 { $1 }
arith_0 :
(* Let_in *)
| LET a = variable EQUAL b = arith_0 IN c = arith_0
{
let loc = Location.make $startpos $endpos in
Location.wrap ~loc @@ Let_in (a , b , c)
}
| arith_1 { $1 }

View File

@ -0,0 +1,29 @@
%{
open Ast
%}
%start <Ast.entry_point Location.wrap> entry_point
%%
trail_list(separator, X):
| { [] }
| trail_list_content(separator, X) { $1 }
trail_list_content(separator, X):
| x = trail_list_last(separator, X) { x }
| x = X separator xs = trail_list_content(separator, X) { x :: xs }
trail_list_last(separator, X):
| x = X option(separator) { [ x ] }
lead_list(separator, X):
| { [] }
| lead_list_content(separator, X) { $1 }
lead_list_content(separator, X):
| x = lead_list_first(separator, X) { x }
| xs = lead_list_content(separator, X) separator x = X { xs @ [ x ] }
lead_list_first (separator, X):
| option(separator) x = X { [ x ] }
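The two macros are mirror images: `lead_list` tolerates one optional leading separator and forbids a trailing one, while `trail_list` tolerates one optional trailing separator and forbids a leading one. With the list syntax of the grammar above, that gives (illustrative inputs only):

list [ ]            accepted: empty lead_list
list [ a ; b ]      accepted
list [ ; a ; b ]    accepted: the leading SEMICOLON is consumed by lead_list_first
list [ a ; b ; ]    rejected: a trailing SEMICOLON would require trail_list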

View File

@ -0,0 +1,9 @@
%token EOF
%token <int> INT
%token <string> STRING
%token <string> NAME
%token LET IN EQUAL
%token PLUS MINUS TIMES DIV
%token LIST LSQUARE RSQUARE SEMICOLON
%%

View File

@ -0,0 +1,10 @@
(* Minimal driver: parse the file named by the first command-line argument
   and report success. *)
let () =
  (match Array.length Sys.argv with
   | 1 -> exit 1 (* no input file given *)
   | _ -> ()) ;
  let path = Sys.argv.(1) in
  let chan = open_in path in
  let lexbuf = Lexing.from_channel chan in
  let _ast = Parser.entry_point Lex.Lexer.token lexbuf in
  Format.printf "parse ok\n" ;
  ()
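A sketch (not part of the commit) of the same driver with a friendlier failure mode, using only the `Parser.Error` exception declared in parser.mli and the lexbuf positions:

let () =
  if Array.length Sys.argv < 2 then (prerr_endline "usage: user <file>" ; exit 1) ;
  let chan = open_in Sys.argv.(1) in
  let lexbuf = Lexing.from_channel chan in
  (try ignore (Parser.entry_point Lex.Lexer.token lexbuf) with
   | Parser.Error ->
     (* Report the position the lexer had reached when the parser failed. *)
     let p = lexbuf.Lexing.lex_curr_p in
     Format.eprintf "parse error at line %d, column %d@."
       p.Lexing.pos_lnum (p.Lexing.pos_cnum - p.Lexing.pos_bol) ;
     exit 1) ;
  close_in chan ;
  Format.printf "parse ok\n"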

View File

@ -8,7 +8,7 @@
(library
(name ligo_parser)
(public_name ligo-parser)
(public_name ligo.parser)
(modules_without_implementation Error)
(libraries getopt hex str uutf zarith))