Merge branch 'rinderknecht-dev' into 'dev'

Removed [Error.mli]

See merge request ligolang/ligo!260

commit a8fdbbfd84

@@ -1 +1 @@
---explain --external-tokens LexToken --base Parser ParToken.mly
+--table --strict --explain --external-tokens LexToken --base Parser ParToken.mly

@@ -6,7 +6,6 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
 $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
 ../shared/Lexer.mli
 ../shared/Lexer.mll
-../shared/Error.mli
 ../shared/EvalOpt.ml
 ../shared/EvalOpt.mli
 ../shared/FQueue.ml

src/passes/1-parser/cameligo/ParserAPI.ml (new file, 57 lines)
@@ -0,0 +1,57 @@
+(** Generic parser for LIGO *)
+
+module type PARSER =
+  sig
+    (* The type of tokens *)
+
+    type token
+
+    (* This exception is raised by the monolithic API functions *)
+
+    exception Error
+
+    (* The monolithic API *)
+
+    val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
+
+    (* The incremental API *)
+
+    module MenhirInterpreter :
+      sig
+        include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
+                with type token = token
+      end
+
+    module Incremental :
+      sig
+        val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
+      end
+  end
+
+(* Main functor *)
+
+module Make (Lexer: Lexer.S)
+            (Parser: PARSER with type token = Lexer.Token.token) =
+  struct
+
+    module I = Parser.MenhirInterpreter
+
+    (* The parser has successfully produced a semantic value. *)
+
+    let success v = v
+
+    (* The parser has suspended itself because of a syntax error. Stop. *)
+
+    let fail _checkpoint = raise Parser.Error
+
+    (* The generic parsing function *)
+
+    let incr_contract Lexer.{read; buffer; close; _} : AST.t =
+      let supplier = I.lexer_lexbuf_to_supplier read buffer in
+      let parser   = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
+      let ast      = I.loop_handle success fail supplier parser
+      in close (); ast
+
+    let mono_contract = Parser.contract
+
+  end
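
A note on [fail] in the functor above: it discards the checkpoint and merely re-raises Parser.Error. A minimal sketch of a possible refinement (not part of this commit; it assumes only MenhirLib.IncrementalEngine, whose INCREMENTAL_ENGINE signature provides HandlingError and current_state_number):

    (* Hypothetical variant of [fail]: report the LR automaton state in
       which the syntax error occurred before raising. *)
    let fail checkpoint =
      match checkpoint with
        I.HandlingError env ->
          let state = I.current_state_number env in
          Printf.eprintf "Syntax error in state %d.\n%!" state;
          raise Parser.Error
      | _ -> assert false  (* [loop_handle] calls [fail] on [HandlingError] only *)

The state number could then be used to index the messages produced by menhir --list-errors.
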
src/passes/1-parser/cameligo/ParserAPI.mli (new file, 39 lines)
@@ -0,0 +1,39 @@
+(** Generic parser API for LIGO *)
+
+module type PARSER =
+  sig
+    (* The type of tokens *)
+
+    type token
+
+    (* This exception is raised by the monolithic API functions *)
+
+    exception Error
+
+    (* The monolithic API *)
+
+    val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
+
+    (* The incremental API *)
+
+    module MenhirInterpreter :
+      sig
+        include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
+                with type token = token
+      end
+
+    module Incremental :
+      sig
+        val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
+      end
+  end
+
+(* Main functor *)
+
+module Make (Lexer: Lexer.S)
+            (Parser: PARSER with type token = Lexer.Token.token) :
+  sig
+    val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
+    val incr_contract : Lexer.instance -> AST.t
+  end
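
The two entry points of the functor differ in what they consume: mono_contract drives a tokeniser over a Lexing.lexbuf in the classic ocamlyacc style, whereas incr_contract takes a whole Lexer.instance and lets Menhir pull tokens on demand. A sketch condensed from the ParserMain diffs below (pp_input is the preprocessed input, as defined there):

    module ParserFront = ParserAPI.Make (Lexer) (Parser)

    let lexer_inst = Lexer.open_token_stream (Some pp_input)

    (* Incremental API *)
    let ast = ParserFront.incr_contract lexer_inst

    (* Monolithic API *)
    (* let ast = ParserFront.mono_contract tokeniser buffer *)
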
@@ -25,15 +25,14 @@ let () = Printexc.record_backtrace true
 let external_ text =
   Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
 
-type Error.t += ParseError
+type error = SyntaxError
 
 let error_to_string = function
-  ParseError -> "Syntax error.\n"
-| _ -> assert false
+  SyntaxError -> "Syntax error.\n"
 
 let print_error ?(offsets=true) mode Region.{region; value} ~file =
-  let msg = error_to_string value in
-  let reg = region#to_string ~file ~offsets mode in
+  let msg = error_to_string value
+  and reg = region#to_string ~file ~offsets mode in
   Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
 
 (** {1 Preprocessing the input source and opening the input channels} *)

@@ -77,11 +76,11 @@ let () =
 
 (** {1 Instanciating the lexer} *)
 
 module Lexer = Lexer.Make (LexToken)
 
 module Log = LexerLog.Make (Lexer)
+module ParserFront = ParserAPI.Make (Lexer) (Parser)
 
-let Lexer.{read; buffer; get_pos; get_last; close} =
-  Lexer.open_token_stream (Some pp_input)
+let lexer_inst = Lexer.open_token_stream (Some pp_input)
+let Lexer.{read; buffer; get_pos; get_last; close} = lexer_inst
 
 and cout = stdout

@@ -98,7 +97,10 @@ let tokeniser = read ~log
 
 let () =
   try
-    let ast = Parser.contract tokeniser buffer in
+    (* The incremental API *)
+    let ast = ParserFront.incr_contract lexer_inst in
+    (* The monolithic API *)
+    (* let ast = ParserFront.mono_contract tokeniser buffer in *)
     if Utils.String.Set.mem "ast" options#verbose
     then let buffer = Buffer.create 131 in
          let state = ParserLog.mk_state

@@ -126,7 +128,7 @@ let () =
                   options#mode err ~file
   | Parser.Error ->
       let region = get_last () in
-      let error = Region.{region; value=ParseError} in
+      let error = Region.{region; value=SyntaxError} in
       let () = close_all () in
       print_error ~offsets:options#offsets
                   options#mode error ~file

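Why the catch-all clause could be dropped in the hunks above: the old Error.t was an extensible variant (type t = .., see the deleted Error.mli near the end of this diff), so no match on it was ever exhaustive and a [| _ -> assert false] case was obligatory. With a closed local variant, exhaustiveness is checked at compile time. A small self-contained illustration (names local to this sketch):

    type error = SyntaxError          (* closed: all constructors are known *)

    let error_to_string = function
      SyntaxError -> "Syntax error.\n"
    (* No catch-all needed: adding a constructor to [error] now triggers a
       non-exhaustive-match warning here instead of a run-time assert. *)
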
@@ -3,38 +3,33 @@
 (menhir
   (merge_into Parser)
   (modules ParToken Parser)
-  (flags -la 1 --explain --external-tokens LexToken))
+  (flags -la 1 --table --strict --explain --external-tokens LexToken))
 
 (library
   (name parser_cameligo)
   (public_name ligo.parser.cameligo)
   (modules AST cameligo Parser ParserLog LexToken)
   (libraries
+    menhirLib
     parser_shared
     str
     simple-utils
     tezos-utils
-    getopt
-  )
-  (flags (:standard -open Simple_utils -open Parser_shared ))
-)
+    getopt)
+  (flags (:standard -open Simple_utils -open Parser_shared )))
 
 (executable
   (name LexerMain)
   (libraries
     parser_cameligo)
   (modules
-    LexerMain
-  )
-  (flags (:standard -open Parser_shared -open Parser_cameligo))
-)
+    LexerMain)
+  (flags (:standard -open Parser_shared -open Parser_cameligo)))
 
 (executable
   (name ParserMain)
   (libraries
     parser_cameligo)
   (modules
-    ParserMain
-  )
-  (flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo))
-)
+    ParserMain)
+  (flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))

@@ -1 +1 @@
---explain --external-tokens LexToken --base Parser ParToken.mly
+--table --strict --explain --external-tokens LexToken --base Parser

@@ -6,7 +6,6 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
 $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
 ../shared/Lexer.mli
 ../shared/Lexer.mll
-../shared/Error.mli
 ../shared/EvalOpt.ml
 ../shared/EvalOpt.mli
 ../shared/FQueue.ml

src/passes/1-parser/pascaligo/.unlexer.tag (new file, empty)
@@ -77,10 +77,8 @@ type t =
 | Case of Region.t      (* "case"     *)
 | Const of Region.t     (* "const"    *)
 | Contains of Region.t  (* "contains" *)
-| Down of Region.t      (* "down"     *)
 | Else of Region.t      (* "else"     *)
 | End of Region.t       (* "end"      *)
-| Fail of Region.t      (* "fail"     *)
 | False of Region.t     (* "False"    *)
 | For of Region.t       (* "for"      *)
 | From of Region.t      (* "from"     *)

@@ -100,7 +98,6 @@ type t =
 | Remove of Region.t    (* "remove" *)
 | Set of Region.t       (* "set"    *)
 | Skip of Region.t      (* "skip"   *)
-| Step of Region.t      (* "step"   *)
 | Then of Region.t      (* "then"   *)
 | To of Region.t        (* "to"     *)
 | True of Region.t      (* "True"   *)

@@ -75,10 +75,8 @@ type t =
 | Case of Region.t      (* "case"     *)
 | Const of Region.t     (* "const"    *)
 | Contains of Region.t  (* "contains" *)
-| Down of Region.t      (* "down"     *)
 | Else of Region.t      (* "else"     *)
 | End of Region.t       (* "end"      *)
-| Fail of Region.t      (* "fail"     *)
 | False of Region.t     (* "False"    *)
 | For of Region.t       (* "for"      *)
 | From of Region.t      (* "from"     *)

@@ -98,7 +96,6 @@ type t =
 | Remove of Region.t    (* "remove" *)
 | Set of Region.t       (* "set"    *)
 | Skip of Region.t      (* "skip"   *)
-| Step of Region.t      (* "step"   *)
 | Then of Region.t      (* "then"   *)
 | To of Region.t        (* "to"     *)
 | True of Region.t      (* "True"   *)

@@ -184,10 +181,8 @@ let proj_token = function
 | Case region -> region, "Case"
 | Const region -> region, "Const"
 | Contains region -> region, "Contains"
-| Down region -> region, "Down"
 | Else region -> region, "Else"
 | End region -> region, "End"
-| Fail region -> region, "Fail"
 | False region -> region, "False"
 | For region -> region, "For"
 | From region -> region, "From"

@@ -207,7 +202,6 @@ let proj_token = function
 | Remove region -> region, "Remove"
 | Set region -> region, "Set"
 | Skip region -> region, "Skip"
-| Step region -> region, "Step"
 | Then region -> region, "Then"
 | To region -> region, "To"
 | True region -> region, "True"

@@ -276,10 +270,8 @@ let to_lexeme = function
 | Case _ -> "case"
 | Const _ -> "const"
 | Contains _ -> "contains"
-| Down _ -> "down"
 | Else _ -> "else"
 | End _ -> "end"
-| Fail _ -> "fail"
 | False _ -> "False"
 | For _ -> "for"
 | From _ -> "from"

@@ -299,7 +291,6 @@ let to_lexeme = function
 | Remove _ -> "remove"
 | Set _ -> "set"
 | Skip _ -> "skip"
-| Step _ -> "step"
 | Then _ -> "then"
 | To _ -> "to"
 | True _ -> "True"

@@ -336,13 +327,11 @@ let keywords = [
   (fun reg -> Case reg);
   (fun reg -> Const reg);
   (fun reg -> Contains reg);
-  (fun reg -> Down reg);
   (fun reg -> Else reg);
   (fun reg -> End reg);
   (fun reg -> For reg);
   (fun reg -> From reg);
   (fun reg -> Function reg);
-  (fun reg -> Fail reg);
   (fun reg -> False reg);
   (fun reg -> If reg);
   (fun reg -> In reg);

@@ -360,7 +349,6 @@ let keywords = [
   (fun reg -> Remove reg);
   (fun reg -> Set reg);
   (fun reg -> Skip reg);
-  (fun reg -> Step reg);
   (fun reg -> Then reg);
   (fun reg -> To reg);
   (fun reg -> True reg);

@@ -560,10 +548,8 @@ let is_kwd = function
 | Case _
 | Const _
 | Contains _
-| Down _
 | Else _
 | End _
-| Fail _
 | False _
 | For _
 | From _

@@ -583,7 +569,6 @@ let is_kwd = function
 | Remove _
 | Set _
 | Skip _
-| Step _
 | Then _
 | To _
 | True _

src/passes/1-parser/pascaligo/ParserAPI.ml (new file, 57 lines)
@@ -0,0 +1,57 @@
+(** Generic parser for LIGO *)
+
+module type PARSER =
+  sig
+    (* The type of tokens *)
+
+    type token
+
+    (* This exception is raised by the monolithic API functions *)
+
+    exception Error
+
+    (* The monolithic API *)
+
+    val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
+
+    (* The incremental API *)
+
+    module MenhirInterpreter :
+      sig
+        include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
+                with type token = token
+      end
+
+    module Incremental :
+      sig
+        val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
+      end
+  end
+
+(* Main functor *)
+
+module Make (Lexer: Lexer.S)
+            (Parser: PARSER with type token = Lexer.Token.token) =
+  struct
+
+    module I = Parser.MenhirInterpreter
+
+    (* The parser has successfully produced a semantic value. *)
+
+    let success v = v
+
+    (* The parser has suspended itself because of a syntax error. Stop. *)
+
+    let fail _checkpoint = raise Parser.Error
+
+    (* The generic parsing function *)
+
+    let incr_contract Lexer.{read; buffer; close; _} : AST.t =
+      let supplier = I.lexer_lexbuf_to_supplier read buffer in
+      let parser   = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
+      let ast      = I.loop_handle success fail supplier parser
+      in close (); ast
+
+    let mono_contract = Parser.contract
+
+  end

src/passes/1-parser/pascaligo/ParserAPI.mli (new file, 39 lines)
@@ -0,0 +1,39 @@
+(** Generic parser API for LIGO *)
+
+module type PARSER =
+  sig
+    (* The type of tokens *)
+
+    type token
+
+    (* This exception is raised by the monolithic API functions *)
+
+    exception Error
+
+    (* The monolithic API *)
+
+    val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
+
+    (* The incremental API *)
+
+    module MenhirInterpreter :
+      sig
+        include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
+                with type token = token
+      end
+
+    module Incremental :
+      sig
+        val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
+      end
+  end
+
+(* Main functor *)
+
+module Make (Lexer: Lexer.S)
+            (Parser: PARSER with type token = Lexer.Token.token) :
+  sig
+    val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
+    val incr_contract : Lexer.instance -> AST.t
+  end

@@ -25,15 +25,14 @@ let () = Printexc.record_backtrace true
 let external_ text =
   Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
 
-type Error.t += ParseError
+type error = SyntaxError
 
 let error_to_string = function
-  ParseError -> "Syntax error.\n"
-| _ -> assert false
+  SyntaxError -> "Syntax error.\n"
 
 let print_error ?(offsets=true) mode Region.{region; value} ~file =
-  let msg = error_to_string value in
-  let reg = region#to_string ~file ~offsets mode in
+  let msg = error_to_string value
+  and reg = region#to_string ~file ~offsets mode in
   Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
 
 (** {1 Preprocessing the input source and opening the input channels} *)

@@ -77,11 +76,11 @@ let () =
 
 (** {1 Instanciating the lexer} *)
 
 module Lexer = Lexer.Make (LexToken)
 
 module Log = LexerLog.Make (Lexer)
+module ParserFront = ParserAPI.Make (Lexer) (Parser)
 
-let Lexer.{read; buffer; get_pos; get_last; close} =
-  Lexer.open_token_stream (Some pp_input)
+let lexer_inst = Lexer.open_token_stream (Some pp_input)
+let Lexer.{read; buffer; get_pos; get_last; close} = lexer_inst
 
 and cout = stdout

@@ -98,7 +97,10 @@ let tokeniser = read ~log
 
 let () =
   try
-    let ast = Parser.contract tokeniser buffer in
+    (* The incremental API *)
+    let ast = ParserFront.incr_contract lexer_inst in
+    (* The monolithic API *)
+    (* let ast = ParserFront.mono_contract tokeniser buffer in *)
     if Utils.String.Set.mem "ast" options#verbose
     then let buffer = Buffer.create 131 in
          let state = ParserLog.mk_state

@@ -126,7 +128,7 @@ let () =
                   options#mode err ~file
   | Parser.Error ->
       let region = get_last () in
-      let error = Region.{region; value=ParseError} in
+      let error = Region.{region; value=SyntaxError} in
      let () = close_all () in
       print_error ~offsets:options#offsets
                   options#mode error ~file

@@ -3,18 +3,18 @@
 (menhir
   (merge_into Parser)
   (modules ParToken Parser)
-  (flags -la 1 --explain --external-tokens LexToken))
+  (flags -la 1 --table --strict --explain --external-tokens LexToken))
 
 (library
   (name parser_pascaligo)
   (public_name ligo.parser.pascaligo)
   (modules AST pascaligo Parser ParserLog LexToken)
   (libraries
+    menhirLib
     parser_shared
     hex
     simple-utils
-    tezos-utils
-  )
+    tezos-utils)
   (flags (:standard -open Parser_shared -open Simple_utils))
 )

@@ -26,20 +26,16 @@
     tezos-utils
     parser_pascaligo)
   (modules
-    LexerMain
-  )
-  (flags (:standard -open Parser_shared -open Parser_pascaligo))
-)
+    LexerMain)
+  (flags (:standard -open Parser_shared -open Parser_pascaligo)))
 
 (executable
   (name ParserMain)
   (libraries
     parser_pascaligo)
   (modules
-    ParserMain
-  )
-  (flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo))
-)
+    ParserMain)
+  (flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo)))
 
 ;; The two (rule) directives below are for local development.
 ;; Just run "dune build Parser.exe" to get a Parser.exe in this directory.

src/passes/1-parser/pascaligo/unlexer.ml (new file, 121 lines)
@@ -0,0 +1,121 @@
+(** Converting the textual representation of tokens produced by Menhir
+    into concrete syntax *)
+
+(* See [ParToken.mly] *)
+
+let gen_sym prefix =
+  let count = ref 0 in
+  fun () -> incr count;
+            prefix ^ string_of_int !count
+
+let id_sym   = gen_sym "id"
+and ctor_sym = gen_sym "C"
+
+let concrete = function
+  (* Keywords *)
+
+  "And"      -> "and"
+| "Begin"    -> "begin"
+| "BigMap"   -> "big_map"
+| "Block"    -> "block"
+| "Case"     -> "case"
+| "Const"    -> "const"
+| "Contains" -> "contains"
+| "Else"     -> "else"
+| "End"      -> "end"
+| "False"    -> "False"
+| "For"      -> "for"
+| "Function" -> "function"
+| "From"     -> "from"
+| "If"       -> "if"
+| "In"       -> "in"
+| "Is"       -> "is"
+| "List"     -> "list"
+| "Map"      -> "map"
+| "Mod"      -> "mod"
+| "Nil"      -> "nil"
+| "Not"      -> "not"
+| "Of"       -> "of"
+| "Or"       -> "or"
+| "Patch"    -> "patch"
+| "Record"   -> "record"
+| "Remove"   -> "remove"
+| "Set"      -> "set"
+| "Skip"     -> "skip"
+| "Then"     -> "then"
+| "To"       -> "to"
+| "True"     -> "True"
+| "Type"     -> "type"
+| "Unit"     -> "Unit"
+| "Var"      -> "var"
+| "While"    -> "while"
+| "With"     -> "with"
+
+  (* Data constructors *)
+
+| "C_None" -> "None"
+| "C_Some" -> "Some"
+
+  (* Symbols *)
+
+| "SEMI"     -> ";"
+| "COMMA"    -> ","
+| "LPAR"     -> "("
+| "RPAR"     -> ")"
+| "LBRACE"   -> "{"
+| "RBRACE"   -> "}"
+| "LBRACKET" -> "["
+| "RBRACKET" -> "]"
+| "CONS"     -> "#"
+| "VBAR"     -> "|"
+| "ARROW"    -> "->"
+| "ASS"      -> ":="
+| "EQ"       -> "="
+| "COLON"    -> ":"
+| "LT"       -> "<"
+| "LE"       -> "<="
+| "GT"       -> ">"
+| "GE"       -> ">="
+| "NE"       -> "=/="
+| "PLUS"     -> "+"
+| "MINUS"    -> "-"
+| "SLASH"    -> "/"
+| "TIMES"    -> "*"
+| "DOT"      -> "."
+| "WILD"     -> "_"
+| "CAT"      -> "^"
+
+  (* Literals *)
+
+| "String" -> "\"a string\""
+| "Bytes"  -> "0xAA"
+| "Int"    -> "1"
+| "Nat"    -> "1n"
+| "Mutez"  -> "1mutez"
+| "Ident"  -> id_sym ()
+| "Constr" -> ctor_sym ()
+
+  (* Virtual tokens *)
+
+| "EOF" -> ""
+
+  (* For completeness of open sum types *)
+
+| _ -> "<Unknown>"
+
+(* Unlexing a sentence *)
+
+let unlex (sentence: string) : Buffer.t =
+  let tokens  = Str.split (Str.regexp " ") sentence in
+  let lexemes = List.map concrete tokens in
+  let buffer  = Buffer.create 31 in
+  let rec trans = function
+    [] -> ()
+  | [s] -> Buffer.add_string buffer s
+  | s::l -> Buffer.add_string buffer (s ^ " "); trans l
+  in trans lexemes; buffer
+
+(* Reading one line from the standard input channel and unlex it. *)
+
+let out = unlex (input_line stdin) |> Buffer.contents
+let () = Printf.printf "%s\n" out
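
The unlexer closes the loop with Menhir's error-message tooling: menhir --list-errors emits sentences made of token names, and [concrete] maps each name back to a lexeme so such a sentence can be turned into compilable source. A hypothetical session, assuming the gen_sym counters start fresh (output follows [concrete] above):

    (* Feeding "Const Ident EQ Int SEMI" on standard input would print:
       const id1 = 1 ;                                                  *)
    let () =
      let buffer = unlex "Const Ident EQ Int SEMI" in
      print_endline (Buffer.contents buffer)
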
@@ -1 +1 @@
---explain --external-tokens LexToken --base Parser ParToken.mly
+--table --explain --external-tokens LexToken --base Parser ParToken.mly

@@ -6,7 +6,6 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
 $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
 ../shared/Lexer.mli
 ../shared/Lexer.mll
-../shared/Error.mli
 ../shared/EvalOpt.ml
 ../shared/EvalOpt.mli
 ../shared/FQueue.ml

@@ -5,12 +5,13 @@
 
 (* Literals *)
 
-%token <string Region.reg> Ident "<ident>"
-%token <string Region.reg> Constr "<constr>"
-%token <string Region.reg> String "<string>"
-%token <(string * Z.t) Region.reg> Int "<int>"
-%token <(string * Z.t) Region.reg> Nat "<nat>"
-%token <(string * Z.t) Region.reg> Mutez "<mutez>"
+%token <string Region.reg> String "<string>"
+%token <(LexToken.lexeme * Hex.t) Region.reg> Bytes "<bytes>"
+%token <(string * Z.t) Region.reg> Int "<int>"
+%token <(string * Z.t) Region.reg> Nat "<nat>"
+%token <(string * Z.t) Region.reg> Mutez "<mutez>"
+%token <string Region.reg> Ident "<ident>"
+%token <string Region.reg> Constr "<constr>"
 
 (* Symbols *)

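
The new Bytes token carries both the raw lexeme and its numeric value. A sketch of a plausible payload, assuming Hex.t from the hex library (type Hex.t = [`Hex of string]) and that LexToken.lexeme abbreviates string, which is an assumption here:

    (* hypothetical payload for the source text 0xAA *)
    let bytes_payload : string * Hex.t = ("0xAA", `Hex "aa")
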
src/passes/1-parser/reasonligo/ParserAPI.ml (new file, 57 lines)
@@ -0,0 +1,57 @@
+(** Generic parser for LIGO *)
+
+module type PARSER =
+  sig
+    (* The type of tokens *)
+
+    type token
+
+    (* This exception is raised by the monolithic API functions *)
+
+    exception Error
+
+    (* The monolithic API *)
+
+    val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
+
+    (* The incremental API *)
+
+    module MenhirInterpreter :
+      sig
+        include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
+                with type token = token
+      end
+
+    module Incremental :
+      sig
+        val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
+      end
+  end
+
+(* Main functor *)
+
+module Make (Lexer: Lexer.S)
+            (Parser: PARSER with type token = Lexer.Token.token) =
+  struct
+
+    module I = Parser.MenhirInterpreter
+
+    (* The parser has successfully produced a semantic value. *)
+
+    let success v = v
+
+    (* The parser has suspended itself because of a syntax error. Stop. *)
+
+    let fail _checkpoint = raise Parser.Error
+
+    (* The generic parsing function *)
+
+    let incr_contract Lexer.{read; buffer; close; _} : AST.t =
+      let supplier = I.lexer_lexbuf_to_supplier read buffer in
+      let parser   = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
+      let ast      = I.loop_handle success fail supplier parser
+      in close (); ast
+
+    let mono_contract = Parser.contract
+
+  end

src/passes/1-parser/reasonligo/ParserAPI.mli (new file, 39 lines)
@@ -0,0 +1,39 @@
+(** Generic parser API for LIGO *)
+
+module type PARSER =
+  sig
+    (* The type of tokens *)
+
+    type token
+
+    (* This exception is raised by the monolithic API functions *)
+
+    exception Error
+
+    (* The monolithic API *)
+
+    val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
+
+    (* The incremental API *)
+
+    module MenhirInterpreter :
+      sig
+        include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
+                with type token = token
+      end
+
+    module Incremental :
+      sig
+        val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
+      end
+  end
+
+(* Main functor *)
+
+module Make (Lexer: Lexer.S)
+            (Parser: PARSER with type token = Lexer.Token.token) :
+  sig
+    val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
+    val incr_contract : Lexer.instance -> AST.t
+  end

@@ -25,15 +25,14 @@ let () = Printexc.record_backtrace true
 let external_ text =
   Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
 
-type Error.t += ParseError
+type error = SyntaxError
 
 let error_to_string = function
-  ParseError -> "Syntax error.\n"
-| _ -> assert false
+  SyntaxError -> "Syntax error.\n"
 
 let print_error ?(offsets=true) mode Region.{region; value} ~file =
-  let msg = error_to_string value in
-  let reg = region#to_string ~file ~offsets mode in
+  let msg = error_to_string value
+  and reg = region#to_string ~file ~offsets mode in
   Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
 
 (** {1 Preprocessing the input source and opening the input channels} *)

@@ -77,11 +76,11 @@ let () =
 
 (** {1 Instanciating the lexer} *)
 
 module Lexer = Lexer.Make (LexToken)
 
 module Log = LexerLog.Make (Lexer)
+module ParserFront = ParserAPI.Make (Lexer) (Parser)
 
-let Lexer.{read; buffer; get_pos; get_last; close} =
-  Lexer.open_token_stream (Some pp_input)
+let lexer_inst = Lexer.open_token_stream (Some pp_input)
+let Lexer.{read; buffer; get_pos; get_last; close} = lexer_inst
 
 and cout = stdout

@@ -98,7 +97,10 @@ let tokeniser = read ~log
 
 let () =
   try
-    let ast = Parser.contract tokeniser buffer in
+    (* The incremental API *)
+    let ast = ParserFront.incr_contract lexer_inst in
+    (* The monolithic API *)
+    (* let ast = ParserFront.mono_contract tokeniser buffer in *)
     if Utils.String.Set.mem "ast" options#verbose
     then let buffer = Buffer.create 131 in
          let state = ParserLog.mk_state

@@ -126,7 +128,7 @@ let () =
                   options#mode err ~file
   | Parser.Error ->
       let region = get_last () in
-      let error = Region.{region; value=ParseError} in
+      let error = Region.{region; value=SyntaxError} in
       let () = close_all () in
       print_error ~offsets:options#offsets
                   options#mode error ~file

@@ -3,39 +3,34 @@
 (menhir
   (merge_into Parser)
   (modules ParToken Parser)
-  (flags -la 1 --explain --dump --strict --external-tokens LexToken))
+  (flags -la 1 --table --explain --strict --external-tokens LexToken))
 
 (library
   (name parser_reasonligo)
   (public_name ligo.parser.reasonligo)
   (modules reasonligo LexToken Parser)
   (libraries
+    menhirLib
     parser_shared
     parser_cameligo
     str
     simple-utils
     tezos-utils
-    getopt
-  )
-  (flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo ))
-)
+    getopt)
+  (flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))
 
 (executable
   (name LexerMain)
   (libraries
     parser_reasonligo)
   (modules
-    LexerMain
-  )
-  (flags (:standard -open Parser_shared -open Parser_reasonligo))
-)
+    LexerMain)
+  (flags (:standard -open Parser_shared -open Parser_reasonligo)))
 
 (executable
   (name ParserMain)
   (libraries
     parser_reasonligo)
   (modules
-    ParserMain
-  )
-  (flags (:standard -open Simple_utils -open Parser_shared -open Parser_reasonligo))
-)
+    ParserMain)
+  (flags (:standard -open Simple_utils -open Parser_shared -open Parser_reasonligo)))

@@ -1,3 +0,0 @@
-type t = ..
-
-type error = t

@@ -136,11 +136,13 @@ module type S =
 
     (* Error reporting *)
 
-    exception Error of Error.t Region.reg
+    type error
+
+    exception Error of error Region.reg
 
     val print_error :
      ?offsets:bool -> [`Byte | `Point] ->
-      Error.t Region.reg -> file:bool -> unit
+      error Region.reg -> file:bool -> unit
 
   end

@@ -159,10 +159,11 @@ module type S = sig
 
   (* Error reporting *)
 
-  exception Error of Error.t Region.reg
+  type error
+  exception Error of error Region.reg
 
   val print_error : ?offsets:bool -> [`Byte | `Point] ->
-    Error.t Region.reg -> file:bool -> unit
+    error Region.reg -> file:bool -> unit
 end
 
 (* The functorised interface

@@ -330,22 +331,23 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
 
     (* ERRORS *)
 
-    type Error.t += Invalid_utf8_sequence
-    type Error.t += Unexpected_character of char
-    type Error.t += Undefined_escape_sequence
-    type Error.t += Missing_break
-    type Error.t += Unterminated_string
-    type Error.t += Unterminated_integer
-    type Error.t += Odd_lengthed_bytes
-    type Error.t += Unterminated_comment
-    type Error.t += Orphan_minus
-    type Error.t += Non_canonical_zero
-    type Error.t += Negative_byte_sequence
-    type Error.t += Broken_string
-    type Error.t += Invalid_character_in_string
-    type Error.t += Reserved_name
-    type Error.t += Invalid_symbol
-    type Error.t += Invalid_natural
+    type error =
+      Invalid_utf8_sequence
+    | Unexpected_character of char
+    | Undefined_escape_sequence
+    | Missing_break
+    | Unterminated_string
+    | Unterminated_integer
+    | Odd_lengthed_bytes
+    | Unterminated_comment
+    | Orphan_minus
+    | Non_canonical_zero
+    | Negative_byte_sequence
+    | Broken_string
+    | Invalid_character_in_string
+    | Reserved_name
+    | Invalid_symbol
+    | Invalid_natural
 
     let error_to_string = function
       Invalid_utf8_sequence ->

@@ -393,9 +395,8 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
       Hint: Check the LIGO syntax you use.\n"
     | Invalid_natural ->
        "Invalid natural."
-    | _ -> assert false
 
-    exception Error of Error.t Region.reg
+    exception Error of error Region.reg
 
     let print_error ?(offsets=true) mode Region.{region; value} ~file =
      let msg = error_to_string value in

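For client code, catching the exception looks the same as before, but since [error] is abstract in signature [S], clients can no longer forge error values, only consume them. A hedged sketch of a caller, borrowing the [instance] field names and the [read ~log] convention from the ParserMain diffs above (the signature of [read] is an assumption here):

    (* Hypothetical client of Lexer.S: scan one token and print any
       lexing error with the new [print_error]. *)
    let next_token Lexer.{read; buffer; _} ~log =
      try Some (read ~log buffer) with
        Lexer.Error err ->
          Lexer.print_error ~offsets:true `Point err ~file:true;
          None
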
@@ -7,10 +7,8 @@
    simple-utils
    uutf
    getopt
-   zarith
-  )
+   zarith)
   (modules
-   Error
    Lexer
    LexerLog
    Utils

@@ -18,10 +16,8 @@
    FQueue
    EvalOpt
    Version
-   SyntaxError
-  )
-  (modules_without_implementation Error)
-)
+   SyntaxError))
 
 
 (rule
  (targets Version.ml)