Merge branch 'rinderknecht@pprint_comments' into 'dev'

Refactoring of the build of the front-end

See merge request ligolang/ligo!698
This commit is contained in:
Christian Rinderknecht 2020-07-01 19:12:01 +00:00
commit 3656d9e1ff
94 changed files with 3157 additions and 2832 deletions

View File

@ -686,8 +686,8 @@ let%expect_test _ =
storage * tokenOwner storage * tokenOwner
function transferContentsIterator function transferContentsIterator
(const gen__P (const gen__P :
: transferContentsIteratorAccumulator * transferContentsIteratorAccumulator *
transferContentsMichelson) is transferContentsMichelson) is
block { block {
const gen__rhs1 = gen__P; const gen__rhs1 = gen__P;

View File

@ -1,11 +1,12 @@
module CST = Cst.Cameligo module CST = Cst.Cameligo
module LexToken = Parser_cameligo.LexToken module LexToken = Lexer_cameligo.LexToken
module Lexer = Lexer.Make(LexToken) module Lexer = Lexer_shared.Lexer.Make (LexToken)
module Scoping = Parser_cameligo.Scoping module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region module Region = Simple_utils.Region
module ParErr = Parser_cameligo.ParErr module ParErr = Parser_cameligo.ParErr
module SSet = Set.Make (String) module SSet = Set.Make (String)
module Pretty = Parser_cameligo.Pretty module Pretty = Parser_cameligo.Pretty
module EvalOpt = Lexer_shared.EvalOpt
(* Mock IOs TODO: Fill them with CLI options *) (* Mock IOs TODO: Fill them with CLI options *)

View File

@ -5,17 +5,24 @@ $HOME/git/OCaml-build/Makefile
../shared/LexerLib.ml ../shared/LexerLib.ml
../shared/EvalOpt.ml ../shared/EvalOpt.ml
../shared/EvalOpt.mli ../shared/EvalOpt.mli
../shared/FQueue.ml
../shared/FQueue.mli
../shared/LexerLog.mli ../shared/LexerLog.mli
../shared/LexerLog.ml ../shared/LexerLog.ml
../shared/Markup.ml ../shared/Markup.ml
../shared/Markup.mli ../shared/Markup.mli
../shared/Utils.mli
../shared/Utils.ml
../shared/ParserAPI.mli ../shared/ParserAPI.mli
../shared/ParserAPI.ml ../shared/ParserAPI.ml
../shared/LexerUnit.mli ../shared/LexerUnit.mli
../shared/LexerUnit.ml ../shared/LexerUnit.ml
../shared/ParserUnit.mli ../shared/ParserUnit.mli
../shared/ParserUnit.ml ../shared/ParserUnit.ml
./Stubs/Lexer_shared.ml
./Stubs/Lexer_cameligo.ml
./Stubs/Parser_shared.ml
./Stubs/Parser_cameligo.ml
./Stubs/Cst.ml
./Stubs/Cst_cameligo.ml
$HOME/git/ligo/src/stages/1-cst/cameligo/CST.ml
$HOME/git/ligo/src/stages/1-cst/cameligo/ParserLog.mli
$HOME/git/ligo/src/stages/1-cst/cameligo/ParserLog.ml

View File

@ -21,13 +21,16 @@
aliased to [token]. aliased to [token].
*) *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module Pos = Simple_utils.Pos module Pos = Simple_utils.Pos
module Markup = Lexer_shared.Markup
type lexeme = string
(* TOKENS *) (* TOKENS *)
type lexeme = string
type t = type t =
(* Identifiers, labels, numbers and strings *) (* Identifiers, labels, numbers and strings *)

View File

@ -1,17 +1,19 @@
(* ocamllex specification for CameLIGO *)
{ {
(* START HEADER *) (* START HEADER *)
type lexeme = string (* Dependencies *)
let sprintf = Printf.sprintf
module Region = Simple_utils.Region module Region = Simple_utils.Region
module Pos = Simple_utils.Pos module Pos = Simple_utils.Pos
module SMap = Utils.String.Map module Markup = Lexer_shared.Markup
module SSet = Utils.String.Set module SMap = Map.Make (String)
module SSet = Set.Make (String)
(* TOKENS *) (* TOKENS *)
type lexeme = string
type t = type t =
(* Identifiers, labels, numbers and strings *) (* Identifiers, labels, numbers and strings *)
@ -107,6 +109,8 @@ type t =
(* Projections *) (* Projections *)
let sprintf = Printf.sprintf
type token = t type token = t
let proj_token = function let proj_token = function

View File

@ -1,6 +1,14 @@
(* Driver for the CameLIGO lexer *) (* Driver for the CameLIGO lexer *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module Lexer = Lexer_shared.Lexer
module LexerUnit = Lexer_shared.LexerUnit
module LexToken = Lexer_cameligo.LexToken
(* Input/Output *)
module IO = module IO =
struct struct

View File

@ -1,4 +1,5 @@
%{ %{
module LexToken = Lexer_cameligo.LexToken
%} %}
(* Tokens (mirroring those defined in module LexToken) *) (* Tokens (mirroring those defined in module LexToken) *)

View File

@ -15,8 +15,8 @@ open CST
(* Entry points *) (* Entry points *)
%start contract interactive_expr %start contract interactive_expr
%type <Cst.Cameligo.t> contract %type <CST.t> contract
%type <Cst.Cameligo.expr> interactive_expr %type <CST.expr> interactive_expr
%% %%

View File

@ -1,7 +1,16 @@
(* Driver for the CameLIGO parser *) (* Driver for the CameLIGO parser *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module LexToken = Lexer_cameligo.LexToken
module CST = Cst.Cameligo
module SSet = Set.Make (String) module SSet = Set.Make (String)
module ParserUnit = Parser_shared.ParserUnit
module Pretty = Parser_cameligo.Pretty
(* Input/Output *)
module IO = module IO =
struct struct
@ -55,22 +64,22 @@ module SubIO =
module Parser = module Parser =
struct struct
type ast = AST.t type ast = CST.t
type expr = AST.expr type expr = CST.expr
include Parser include Parser_cameligo.Parser
end end
module ParserLog = module ParserLog =
struct struct
type ast = AST.t type ast = CST.t
type expr = AST.expr type expr = CST.expr
include ParserLog include Cst_cameligo.ParserLog
end end
module Lexer = Lexer.Make (LexToken) module Lexer = Lexer_shared.Lexer.Make (LexToken)
module Unit = module Unit =
ParserUnit.Make (Lexer)(AST)(Parser)(Parser_msg)(ParserLog)(SubIO) ParserUnit.Make (Lexer)(CST)(Parser)(Parser_msg)(ParserLog)(SubIO)
(* Main *) (* Main *)

View File

@ -5,6 +5,7 @@ open CST
module Region = Simple_utils.Region module Region = Simple_utils.Region
open! Region open! Region
open! PPrint open! PPrint
module Option = Simple_utils.Option
let pp_par printer {value; _} = let pp_par printer {value; _} =
string "(" ^^ nest 1 (printer value.inside ^^ string ")") string "(" ^^ nest 1 (printer value.inside ^^ string ")")

View File

@ -0,0 +1,2 @@
Note: The files Scoping.mli and Scoping.ml are the destination of
symbolic links from ../reasonligo.

View File

@ -1,5 +1,7 @@
[@@@warning "-42"] [@@@warning "-42"]
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module CST = Cst.Cameligo module CST = Cst.Cameligo
@ -17,7 +19,7 @@ open Region
(* Useful modules *) (* Useful modules *)
module SSet = Utils.String.Set module SSet = Set.Make (String)
module Ord = module Ord =
struct struct

View File

@ -0,0 +1 @@
module Cameligo = CST

View File

@ -0,0 +1 @@
module ParserLog = ParserLog

View File

@ -0,0 +1 @@
module LexToken = LexToken

View File

@ -0,0 +1,6 @@
module EvalOpt = EvalOpt
module Markup = Markup
module Lexer = Lexer
module LexerUnit = LexerUnit
module LexerLog = LexerLog
module LexerLib = LexerLib

View File

@ -0,0 +1,2 @@
module Pretty = Pretty
module Parser = Parser

View File

@ -0,0 +1 @@
module ParserUnit = ParserUnit

View File

@ -1,3 +0,0 @@
module Parser = Parser
module Lexer = Lexer
module LexToken = LexToken

View File

@ -1,13 +1,48 @@
;; --------------------------------------------------------------------
;; LEXING
;; Build of the lexer ;; Build of the lexer
(ocamllex LexToken) (ocamllex LexToken)
;; Build of the lexer as a library
(library
(name lexer_cameligo)
(public_name ligo.lexer.cameligo)
(modules LexToken)
(libraries
;; Ligo
lexer_shared
;; Third party
hex)
(preprocess
(pps bisect_ppx --conditional)))
;; Build of a standalone lexer
(executable
(name LexerMain)
(libraries
;; Ligo
lexer_shared
lexer_cameligo
;; Third party
hex)
(modules LexerMain)
(preprocess
(pps bisect_ppx --conditional)))
;; --------------------------------------------------------------------
;; PARSING
;; Build of the parser ;; Build of the parser
(menhir (menhir
(merge_into Parser) (merge_into Parser)
(modules ParToken Parser) (modules ParToken Parser)
(flags -la 1 --table --strict --explain --external-tokens LexToken)) (flags -la 1 --table --strict --explain
--external-tokens Lexer_cameligo.LexToken))
;; Build of the parser as a library ;; Build of the parser as a library
@ -15,50 +50,47 @@
(name parser_cameligo) (name parser_cameligo)
(public_name ligo.parser.cameligo) (public_name ligo.parser.cameligo)
(modules (modules
Scoping cameligo Parser LexToken ParErr Pretty) Scoping Parser ParErr Pretty)
(libraries (libraries
;; Ligo
lexer_cameligo
parser_shared
cst
;; Vendors
simple-utils
;; Third party
pprint pprint
terminal_size terminal_size
menhirLib menhirLib
parser_shared hex)
str
simple-utils
tezos-utils
cst
)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Parser_shared -open Simple_utils))) (flags (:standard -open Cst_cameligo))) ;; For CST in Parser.mli
;; Build of the unlexer (for covering the error states of the LR ;; Build of the unlexer (for covering the
;; automaton) ;; error states of the LR automaton)
(executable (executable
(name Unlexer) (name Unlexer)
(libraries str) (libraries str)
(modules Unlexer)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional)))
(modules Unlexer))
;; Local build of a standalone lexer
(executable
(name LexerMain)
(libraries parser_cameligo)
(modules LexerMain)
(preprocess
(pps bisect_ppx --conditional))
(flags (:standard -open Parser_shared -open Parser_cameligo)))
;; Local build of a standalone parser ;; Local build of a standalone parser
(executable (executable
(name ParserMain) (name ParserMain)
(libraries parser_cameligo) (libraries
;; Ligo
parser_shared
parser_cameligo
cst
;; Third party
hex)
(modules ParserMain Parser_msg) (modules ParserMain Parser_msg)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional)))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))
;; Build of the covering of error states in the LR automaton ;; Build of the covering of error states in the LR automaton
@ -85,7 +117,14 @@
(rule (rule
(targets all.mligo) (targets all.mligo)
(deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe) (deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe)
(action (run %{script_cover} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly --ext=mligo --unlexer=./Unlexer.exe --messages=Parser.msg --dir=. --concatenate Parser.mly ))) (action (run %{script_cover}
--lex-tokens=LexToken.mli
--par-tokens=ParToken.mly
--ext=mligo
--unlexer=./Unlexer.exe
--messages=Parser.msg
--dir=.
--concatenate Parser.mly)))
;; Error messages ;; Error messages
@ -95,8 +134,7 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli) (deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action (action
(with-stdout-to %{targets} (with-stdout-to %{targets}
(run (run menhir
menhir
--unused-tokens --unused-tokens
--update-errors error.messages.checked-in --update-errors error.messages.checked-in
--table --table
@ -104,18 +142,14 @@
--external-tokens LexToken.mli --external-tokens LexToken.mli
--base Parser.mly --base Parser.mly
ParToken.mly ParToken.mly
Parser.mly Parser.mly))))
)
))
)
(rule (rule
(target error.messages.new) (target error.messages.new)
(mode (promote (until-clean) (only *))) (mode (promote (until-clean) (only *)))
(action (action
(with-stdout-to %{target} (with-stdout-to %{target}
(run (run menhir
menhir
--unused-tokens --unused-tokens
--list-errors --list-errors
--table --table
@ -123,18 +157,13 @@
--external-tokens LexToken.mli --external-tokens LexToken.mli
--base Parser.mly --base Parser.mly
ParToken.mly ParToken.mly
Parser.mly Parser.mly))))
)
)
)
)
(alias (alias
(name runtest) (name runtest)
(deps error.messages error.messages.new) (deps error.messages error.messages.new)
(action (action
(run (run menhir
menhir
--unused-tokens --unused-tokens
--table --table
--strict --strict
@ -143,12 +172,7 @@
ParToken.mly ParToken.mly
Parser.mly Parser.mly
--compare-errors error.messages.new --compare-errors error.messages.new
--compare-errors error.messages --compare-errors error.messages)))
)
)
)
(rule (rule
(targets ParErr.ml) (targets ParErr.ml)
@ -156,8 +180,7 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli) (deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action (action
(with-stdout-to %{targets} (with-stdout-to %{targets}
(run (run menhir
menhir
--unused-tokens --unused-tokens
--table --table
--strict --strict
@ -165,7 +188,4 @@
--base Parser.mly --base Parser.mly
ParToken.mly ParToken.mly
Parser.mly Parser.mly
--compile-errors error.messages.checked-in --compile-errors error.messages.checked-in))))
)
))
)

View File

@ -1,6 +1,9 @@
(* Dependencies *)
module EvalOpt = Lexer_shared.EvalOpt
module CST = Cst.Pascaligo module CST = Cst.Pascaligo
module LexToken = Parser_pascaligo.LexToken module LexToken = Lexer_pascaligo.LexToken
module Lexer = Lexer.Make(LexToken) module Lexer = Lexer_shared.Lexer.Make (LexToken)
module Scoping = Parser_pascaligo.Scoping module Scoping = Parser_pascaligo.Scoping
module Region = Simple_utils.Region module Region = Simple_utils.Region
module ParErr = Parser_pascaligo.ParErr module ParErr = Parser_pascaligo.ParErr

View File

@ -6,14 +6,10 @@ $HOME/git/OCaml-build/Makefile
../shared/LexerLib.ml ../shared/LexerLib.ml
../shared/EvalOpt.ml ../shared/EvalOpt.ml
../shared/EvalOpt.mli ../shared/EvalOpt.mli
../shared/FQueue.ml
../shared/FQueue.mli
../shared/LexerLog.mli ../shared/LexerLog.mli
../shared/LexerLog.ml ../shared/LexerLog.ml
../shared/Markup.ml ../shared/Markup.ml
../shared/Markup.mli ../shared/Markup.mli
../shared/Utils.mli
../shared/Utils.ml
../shared/ParserAPI.mli ../shared/ParserAPI.mli
../shared/ParserAPI.ml ../shared/ParserAPI.ml
../shared/LexerUnit.mli ../shared/LexerUnit.mli
@ -21,3 +17,14 @@ $HOME/git/OCaml-build/Makefile
../shared/ParserUnit.mli ../shared/ParserUnit.mli
../shared/ParserUnit.ml ../shared/ParserUnit.ml
../shared/LexerLib.ml ../shared/LexerLib.ml
./Stubs/Lexer_shared.ml
./Stubs/Lexer_pascaligo.ml
./Stubs/Parser_shared.ml
./Stubs/Parser_pascaligo.ml
./Stubs/Cst.ml
./Stubs/Cst_pascaligo.ml
$HOME/git/ligo/src/stages/1-cst/pascaligo/CST.ml
$HOME/git/ligo/src/stages/1-cst/pascaligo/ParserLog.mli
$HOME/git/ligo/src/stages/1-cst/pascaligo/ParserLog.ml

View File

@ -21,13 +21,16 @@
aliased to [token]. aliased to [token].
*) *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module Pos = Simple_utils.Pos module Pos = Simple_utils.Pos
module Markup = Lexer_shared.Markup
type lexeme = string
(* TOKENS *) (* TOKENS *)
type lexeme = string
type attribute = { type attribute = {
header : string; header : string;
string : lexeme Region.reg string : lexeme Region.reg

View File

@ -1,21 +1,20 @@
(* Lexer specification for LIGO, to be processed by [ocamllex] *) (* ocamllex specification for PascaLIGO *)
{ {
(* START HEADER *) (* START HEADER *)
(* Shorthands *) (* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module Pos = Simple_utils.Pos module Pos = Simple_utils.Pos
module Markup = Lexer_shared.Markup
module SMap = Map.Make (String) module SMap = Map.Make (String)
module SSet = Set.Make (String) module SSet = Set.Make (String)
type lexeme = string
let sprintf = Printf.sprintf
(* TOKENS *) (* TOKENS *)
type lexeme = string
type attribute = { type attribute = {
header : string; header : string;
string : lexeme Region.reg string : lexeme Region.reg
@ -117,6 +116,8 @@ type t =
(* Projections *) (* Projections *)
let sprintf = Printf.sprintf
type token = t type token = t
let proj_token = function let proj_token = function

View File

@ -1,6 +1,14 @@
(* Driver for the PascaLIGO lexer *) (* Driver for the PascaLIGO lexer *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module Lexer = Lexer_shared.Lexer
module LexerUnit = Lexer_shared.LexerUnit
module LexToken = Lexer_pascaligo.LexToken
(* Input/Output *)
module IO = module IO =
struct struct
@ -10,8 +18,12 @@ module IO =
in read ~block ~line:"//" ".ligo" in read ~block ~line:"//" ".ligo"
end end
(* Instantiating the standalone lexer *)
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken)) module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
(* Tracing all tokens in the source *)
let () = let () =
match M.trace () with match M.trace () with
Stdlib.Ok () -> () Stdlib.Ok () -> ()

View File

@ -1,4 +1,5 @@
%{ %{
module LexToken = Lexer_pascaligo.LexToken
%} %}
(* Tokens (mirroring those defined in module LexToken) *) (* Tokens (mirroring those defined in module LexToken) *)

View File

@ -1,8 +1,11 @@
(* Menhir specification of the parsing of PascaLIGO *)
%{ %{
(* START HEADER *) (* START HEADER *)
[@@@warning "-42"] [@@@warning "-42"]
(* Dependencies *)
open Simple_utils.Region open Simple_utils.Region
module CST = Cst.Pascaligo module CST = Cst.Pascaligo
open CST open CST
@ -15,8 +18,8 @@ open CST
(* Entry points *) (* Entry points *)
%start contract interactive_expr %start contract interactive_expr
%type <Cst.Pascaligo.t> contract %type <CST.t> contract
%type <Cst.Pascaligo.expr> interactive_expr %type <CST.expr> interactive_expr
%% %%
@ -143,7 +146,9 @@ type_decl:
terminator = $5} terminator = $5}
in {region; value} } in {region; value} }
type_expr_colon: ":" type_expr { $1,$2 } type_annot:
":" type_expr { $1,$2 }
type_expr: type_expr:
fun_type | sum_type | record_type { $1 } fun_type | sum_type | record_type { $1 }
@ -241,7 +246,7 @@ field_decl:
fun_expr: fun_expr:
"function" parameters type_expr_colon? "is" expr { "function" parameters type_annot? "is" expr {
let stop = expr_to_region $5 in let stop = expr_to_region $5 in
let region = cover $1 stop let region = cover $1 stop
and value = {kwd_function = $1; and value = {kwd_function = $1;
@ -254,8 +259,7 @@ fun_expr:
(* Function declarations *) (* Function declarations *)
open_fun_decl: open_fun_decl:
ioption ("recursive") "function" fun_name parameters type_expr_colon? "is" ioption("recursive") "function" fun_name parameters type_annot? "is" expr {
expr {
Scoping.check_reserved_name $3; Scoping.check_reserved_name $3;
let stop = expr_to_region $7 in let stop = expr_to_region $7 in
let region = cover $2 stop let region = cover $2 stop
@ -356,7 +360,7 @@ open_var_decl:
in {region; value} } in {region; value} }
unqualified_decl(OP): unqualified_decl(OP):
var type_expr_colon? OP expr { var type_annot? OP expr {
Scoping.check_reserved_name $1; Scoping.check_reserved_name $1;
let region = expr_to_region $4 let region = expr_to_region $4
in $1, $2, $3, $4, region } in $1, $2, $3, $4, region }
@ -594,28 +598,17 @@ while_loop:
in While {region; value} } in While {region; value} }
for_loop: for_loop:
"for" var ":=" expr "to" expr block { "for" var ":=" expr "to" expr step_clause? block {
let region = cover $1 $7.region in Scoping.check_reserved_name $2;
let region = cover $1 $8.region in
let value = {kwd_for = $1; let value = {kwd_for = $1;
binder = $2; binder = $2;
assign = $3; assign = $3;
init = $4; init = $4;
kwd_to = $5; kwd_to = $5;
bound = $6; bound = $6;
step = None; step = $7;
block = $7} block = $8}
in For (ForInt {region; value})
}
| "for" var ":=" expr "to" expr "step" expr block {
let region = cover $1 $9.region in
let value = {kwd_for = $1;
binder = $2;
assign = $3;
init = $4;
kwd_to = $5;
bound = $6;
step = Some ($7, $8);
block = $9}
in For (ForInt {region; value}) in For (ForInt {region; value})
} }
| "for" var arrow_clause? "in" collection expr block { | "for" var arrow_clause? "in" collection expr block {
@ -630,6 +623,9 @@ for_loop:
block = $7} block = $7}
in For (ForCollect {region; value}) } in For (ForCollect {region; value}) }
step_clause:
"step" expr { $1,$2 }
collection: collection:
"map" { Map $1 } "map" { Map $1 }
| "set" { Set $1 } | "set" { Set $1 }
@ -655,13 +651,8 @@ block_with :
let start = $2 let start = $2
and stop = expr_to_region $3 in and stop = expr_to_region $3 in
let region = cover start stop in let region = cover start stop in
let value : CST.block_with = { let value = {block=$1; kwd_with=$2; expr=$3}
block = $1; in {region; value} }
kwd_with = $2;
expr = $3;
}
in {value;region}
}
cond_expr: cond_expr:
"if" expr "then" expr ";"? "else" expr { "if" expr "then" expr ";"? "else" expr {

View File

@ -1,7 +1,16 @@
(* Driver for the PascaLIGO parser *) (* Driver for the PascaLIGO parser *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module LexToken = Lexer_pascaligo.LexToken
module CST = Cst.Pascaligo
module SSet = Set.Make (String) module SSet = Set.Make (String)
module ParserUnit = Parser_shared.ParserUnit
module Pretty = Parser_pascaligo.Pretty
(* Input/Output *)
module IO = module IO =
struct struct
@ -55,22 +64,22 @@ module SubIO =
module Parser = module Parser =
struct struct
type ast = AST.t type ast = CST.t
type expr = AST.expr type expr = CST.expr
include Parser include Parser_pascaligo.Parser
end end
module ParserLog = module ParserLog =
struct struct
type ast = AST.t type ast = CST.t
type expr = AST.expr type expr = CST.expr
include ParserLog include Cst_pascaligo.ParserLog
end end
module Lexer = Lexer.Make (LexToken) module Lexer = Lexer_shared.Lexer.Make (LexToken)
module Unit = module Unit =
ParserUnit.Make (Lexer)(AST)(Parser)(Parser_msg)(ParserLog)(SubIO) ParserUnit.Make (Lexer)(CST)(Parser)(Parser_msg)(ParserLog)(SubIO)
(* Main *) (* Main *)

View File

@ -40,12 +40,16 @@ and pp_attr_decl decl = pp_ne_injection pp_string decl
and pp_const_decl {value; _} = and pp_const_decl {value; _} =
let {name; const_type; init; attributes; _} = value in let {name; const_type; init; attributes; _} = value in
let start = string ("const " ^ name.value) in
let t_expr = const_type in
let attr = match attributes with let attr = match attributes with
None -> empty None -> empty
| Some a -> hardline ^^ pp_attr_decl a in | Some a -> hardline ^^ pp_attr_decl a in
group (start ^/^ pp_option (fun (_, d) -> nest 2 (string ": " ^^ pp_type_expr d)) t_expr) let start = string ("const " ^ name.value) in
let start =
match const_type with
None -> start
| Some (_, e) ->
group (start ^/^ nest 2 (string ": " ^^ pp_type_expr e)) in
start
^^ group (break 1 ^^ nest 2 (string "= " ^^ pp_expr init)) ^^ group (break 1 ^^ nest 2 (string "= " ^^ pp_expr init))
^^ attr ^^ attr
@ -129,22 +133,33 @@ and pp_fun_expr {value; _} =
let {param; ret_type; return; _} : fun_expr = value in let {param; ret_type; return; _} : fun_expr = value in
let start = string "function" in let start = string "function" in
let parameters = pp_par pp_parameters param in let parameters = pp_par pp_parameters param in
let expr = pp_expr return in let t_annot =
match ret_type with
None -> empty
| Some (_, e) ->
group (break 1 ^^ nest 2 (string ": " ^^ pp_type_expr e)) in
group (start ^^ nest 2 (break 1 ^^ parameters)) group (start ^^ nest 2 (break 1 ^^ parameters))
^^ pp_option (fun (_,d) -> group (break 1 ^^ nest 2 (string ": " ^^ pp_type_expr d))) ret_type ^^ t_annot
^^ string " is" ^^ group (nest 4 (break 1 ^^ expr)) ^^ string " is" ^^ group (nest 4 (break 1 ^^ pp_expr return))
and pp_fun_decl {value; _} = and pp_fun_decl {value; _} =
let {kwd_recursive; fun_name; param; let {kwd_recursive; fun_name; param; ret_type;
ret_type; return; attributes; _} = value in return; attributes; _} = value in
let start = let start =
match kwd_recursive with match kwd_recursive with
None -> string "function" None -> string "function"
| Some _ -> string "recursive" ^/^ string "function" in | Some _ -> string "recursive" ^/^ string "function" in
let start = start ^^ group (break 1 ^^ nest 2 (pp_ident fun_name)) in let start = start ^^ group (break 1 ^^ nest 2 (pp_ident fun_name))
let parameters = pp_par pp_parameters param in and parameters = pp_par pp_parameters param
and t_annot_is =
match ret_type with
None -> string " is"
| Some (_, e) ->
let ret_type = pp_type_expr e in
group (nest 2 (break 1 ^^ string ": " ^^ nest 2 ret_type
^^ string " is"))
and body =
let expr = pp_expr return in let expr = pp_expr return in
let body =
match return with match return with
EBlock _ -> group (break 1 ^^ expr) EBlock _ -> group (break 1 ^^ expr)
| _ -> group (nest 2 (break 1 ^^ expr)) | _ -> group (nest 2 (break 1 ^^ expr))
@ -153,8 +168,9 @@ and pp_fun_decl {value; _} =
None -> empty None -> empty
| Some a -> hardline ^^ pp_attr_decl a in | Some a -> hardline ^^ pp_attr_decl a in
prefix 2 1 start parameters prefix 2 1 start parameters
^^ group (nest 2 (pp_option (fun (_, d) -> break 1 ^^ string ": " ^^ nest 2 (pp_type_expr d)) ret_type ^^ string " is")) ^^ t_annot_is
^^ body ^^ attr ^^ body
^^ attr
and pp_parameters p = pp_nsepseq ";" pp_param_decl p and pp_parameters p = pp_nsepseq ";" pp_param_decl p
@ -164,13 +180,19 @@ and pp_param_decl = function
and pp_param_const {value; _} = and pp_param_const {value; _} =
let {var; param_type; _} : param_const = value in let {var; param_type; _} : param_const = value in
let name = string ("const " ^ var.value) let name = string ("const " ^ var.value) in
in prefix 2 1 name @@ pp_option (fun (_,d) -> string ": " ^^ pp_type_expr d) param_type match param_type with
None -> name
| Some (_, e) ->
prefix 2 1 (name ^^ string " :") (pp_type_expr e)
and pp_param_var {value; _} = and pp_param_var {value; _} =
let {var; param_type; _} : param_var = value in let {var; param_type; _} : param_var = value in
let name = string ("var " ^ var.value) let name = string ("var " ^ var.value) in
in prefix 2 1 name @@ pp_option (fun (_,d) -> string ": " ^^ pp_type_expr d) param_type match param_type with
None -> name
| Some (_, e) ->
prefix 2 1 (name ^^ string " :") (pp_type_expr e)
and pp_block {value; _} = and pp_block {value; _} =
string "block {" string "block {"
@ -192,7 +214,12 @@ and pp_data_decl = function
and pp_var_decl {value; _} = and pp_var_decl {value; _} =
let {name; var_type; init; _} = value in let {name; var_type; init; _} = value in
let start = string ("var " ^ name.value) in let start = string ("var " ^ name.value) in
group (start ^/^ pp_option (fun (_,d) -> nest 2 (string ": " ^^ pp_type_expr d)) var_type) let start =
match var_type with
None -> start
| Some (_, e) ->
group (start ^/^ nest 2 (string ": " ^^ pp_type_expr e)) in
start
^^ group (break 1 ^^ nest 2 (string ":= " ^^ pp_expr init)) ^^ group (break 1 ^^ nest 2 (string ":= " ^^ pp_expr init))
and pp_instruction = function and pp_instruction = function
@ -381,7 +408,7 @@ and pp_expr = function
| EBlock e -> pp_block_with e | EBlock e -> pp_block_with e
and pp_block_with {value; _} = and pp_block_with {value; _} =
let {block;kwd_with; expr;_} = value in let {block; kwd_with; expr} = value in
let expr = value.expr in let expr = value.expr in
let expr = pp_expr expr in let expr = pp_expr expr in
group (pp_block block ^^ string " with" group (pp_block block ^^ string " with"

View File

@ -1,8 +1,14 @@
(* This module exports checks on scoping, called from the parser. *)
[@@@warning "-42"] [@@@warning "-42"]
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module CST = Cst.Pascaligo module CST = Cst.Pascaligo
(* Errors *)
type t = type t =
Reserved_name of CST.variable Reserved_name of CST.variable
| Duplicate_parameter of CST.variable | Duplicate_parameter of CST.variable
@ -18,7 +24,7 @@ open Region
(* Useful modules *) (* Useful modules *)
module SSet = Utils.String.Set module SSet = Set.Make (String)
module Ord = module Ord =
struct struct

View File

@ -1,8 +1,12 @@
(* This module exports checks on scoping, called from the parser. *) (* This module exports checks on scoping, called from the parser. *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module CST = Cst.Pascaligo module CST = Cst.Pascaligo
(* Errors *)
type t = type t =
Reserved_name of CST.variable Reserved_name of CST.variable
| Duplicate_parameter of CST.variable | Duplicate_parameter of CST.variable

View File

@ -0,0 +1 @@
module Pascaligo = CST

View File

@ -0,0 +1 @@
module ParserLog = ParserLog

View File

@ -0,0 +1 @@
module LexToken = LexToken

View File

@ -0,0 +1,6 @@
module EvalOpt = EvalOpt
module Markup = Markup
module Lexer = Lexer
module LexerUnit = LexerUnit
module LexerLog = LexerLog
module LexerLib = LexerLib

View File

@ -0,0 +1,2 @@
module Pretty = Pretty
module Parser = Parser

View File

@ -0,0 +1 @@
module ParserUnit = ParserUnit

View File

@ -1,13 +1,48 @@
;; --------------------------------------------------------------------
;; LEXING
;; Build of the lexer ;; Build of the lexer
(ocamllex LexToken) (ocamllex LexToken)
;; Build of the lexer as a library
(library
(name lexer_pascaligo)
(public_name ligo.lexer.pascaligo)
(modules LexToken)
(libraries
;; Ligo
lexer_shared
;; Third party
hex)
(preprocess
(pps bisect_ppx --conditional)))
;; Build of a standalone lexer
(executable
(name LexerMain)
(libraries
;; Ligo
lexer_shared
lexer_pascaligo
;; Third party
hex)
(modules LexerMain)
(preprocess
(pps bisect_ppx --conditional)))
;; --------------------------------------------------------------------
;; PARSING
;; Build of the parser ;; Build of the parser
(menhir (menhir
(merge_into Parser) (merge_into Parser)
(modules ParToken Parser) (modules ParToken Parser)
(flags -la 1 --table --strict --explain --external-tokens LexToken)) (flags -la 1 --table --strict --explain
--external-tokens Lexer_pascaligo.LexToken))
;; Build of the parser as a library ;; Build of the parser as a library
@ -15,19 +50,22 @@
(name parser_pascaligo) (name parser_pascaligo)
(public_name ligo.parser.pascaligo) (public_name ligo.parser.pascaligo)
(modules (modules
Scoping pascaligo Parser LexToken ParErr Pretty) Scoping Parser ParErr Pretty)
(libraries (libraries
;; Ligo
lexer_pascaligo
parser_shared
cst
;; Vendors
simple-utils
;; Third party
pprint pprint
terminal_size terminal_size
menhirLib menhirLib
parser_shared hex)
hex
Preprocessor
simple-utils
cst)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Parser_shared -open Simple_utils))) (flags (:standard -open Cst_pascaligo))) ;; For CST in Parser.mli
;; Build of the unlexer (for covering the ;; Build of the unlexer (for covering the
;; error states of the LR automaton) ;; error states of the LR automaton)
@ -35,30 +73,24 @@
(executable (executable
(name Unlexer) (name Unlexer)
(libraries str) (libraries str)
(modules Unlexer)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional)))
(modules Unlexer))
;; Local build of a standalone lexer
(executable
(name LexerMain)
(libraries
hex simple-utils tezos-utils parser_pascaligo)
(modules LexerMain)
(preprocess
(pps bisect_ppx --conditional))
(flags (:standard -open Parser_shared -open Parser_pascaligo)))
;; Local build of a standalone parser ;; Local build of a standalone parser
(executable (executable
(name ParserMain) (name ParserMain)
(libraries parser_pascaligo) (libraries
(modules ParserMain) ;; Ligo
parser_shared
parser_pascaligo
cst
;; Third party
hex)
(modules ParserMain Parser_msg)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional)))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo)))
;; Build of the covering of error states in the LR automaton ;; Build of the covering of error states in the LR automaton
@ -85,7 +117,14 @@
(rule (rule
(targets all.ligo) (targets all.ligo)
(deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe) (deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe)
(action (run %{script_cover} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly --ext=ligo --unlexer=./Unlexer.exe --messages=Parser.msg --dir=. --concatenate Parser.mly))) (action (run %{script_cover}
--lex-tokens=LexToken.mli
--par-tokens=ParToken.mly
--ext=ligo
--unlexer=./Unlexer.exe
--messages=Parser.msg
--dir=.
--concatenate Parser.mly)))
;; Error messages ;; Error messages
@ -95,8 +134,7 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli) (deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action (action
(with-stdout-to %{targets} (with-stdout-to %{targets}
(run (run menhir
menhir
--unused-tokens --unused-tokens
--update-errors error.messages.checked-in --update-errors error.messages.checked-in
--table --table
@ -104,18 +142,14 @@
--external-tokens LexToken.mli --external-tokens LexToken.mli
--base Parser.mly --base Parser.mly
ParToken.mly ParToken.mly
Parser.mly Parser.mly))))
)
))
)
(rule (rule
(target error.messages.new) (target error.messages.new)
(mode (promote (until-clean) (only *))) (mode (promote (until-clean) (only *)))
(action (action
(with-stdout-to %{target} (with-stdout-to %{target}
(run (run menhir
menhir
--unused-tokens --unused-tokens
--list-errors --list-errors
--table --table
@ -123,18 +157,13 @@
--external-tokens LexToken.mli --external-tokens LexToken.mli
--base Parser.mly --base Parser.mly
ParToken.mly ParToken.mly
Parser.mly Parser.mly))))
)
)
)
)
(alias (alias
(name runtest) (name runtest)
(deps error.messages error.messages.new) (deps error.messages error.messages.new)
(action (action
(run (run menhir
menhir
--unused-tokens --unused-tokens
--table --table
--strict --strict
@ -143,10 +172,7 @@
ParToken.mly ParToken.mly
Parser.mly Parser.mly
--compare-errors error.messages.new --compare-errors error.messages.new
--compare-errors error.messages --compare-errors error.messages)))
)
)
)
(rule (rule
(targets ParErr.ml) (targets ParErr.ml)
@ -154,8 +180,7 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli) (deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action (action
(with-stdout-to %{targets} (with-stdout-to %{targets}
(run (run menhir
menhir
--unused-tokens --unused-tokens
--table --table
--strict --strict
@ -163,7 +188,4 @@
--base Parser.mly --base Parser.mly
ParToken.mly ParToken.mly
Parser.mly Parser.mly
--compile-errors error.messages.checked-in --compile-errors error.messages.checked-in))))
)
))
)

File diff suppressed because it is too large Load Diff

View File

@ -1,3 +0,0 @@
module Lexer = Lexer
module LexToken = LexToken
module Parser = Parser

View File

@ -1,12 +1,13 @@
module CST = Cst.Cameligo module CST = Cst.Cameligo
module LexToken = Parser_reasonligo.LexToken module LexToken = Lexer_reasonligo.LexToken
module Lexer = Lexer.Make (LexToken) module Lexer = Lexer_shared.Lexer.Make (LexToken)
module Scoping = Parser_cameligo.Scoping module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region module Region = Simple_utils.Region
module ParErr = Parser_reasonligo.ParErr module ParErr = Parser_reasonligo.ParErr
module SyntaxError = Parser_reasonligo.SyntaxError module SyntaxError = Parser_reasonligo.SyntaxError
module SSet = Set.Make (String) module SSet = Set.Make (String)
module Pretty = Parser_reasonligo.Pretty module Pretty = Parser_reasonligo.Pretty
module EvalOpt = Lexer_shared.EvalOpt
(* Mock IOs TODO: Fill them with CLI options *) (* Mock IOs TODO: Fill them with CLI options *)

View File

@ -5,14 +5,10 @@ $HOME/git/OCaml-build/Makefile
../shared/LexerLib.ml ../shared/LexerLib.ml
../shared/EvalOpt.ml ../shared/EvalOpt.ml
../shared/EvalOpt.mli ../shared/EvalOpt.mli
../shared/FQueue.ml
../shared/FQueue.mli
../shared/LexerLog.mli ../shared/LexerLog.mli
../shared/LexerLog.ml ../shared/LexerLog.ml
../shared/Markup.ml ../shared/Markup.ml
../shared/Markup.mli ../shared/Markup.mli
../shared/Utils.mli
../shared/Utils.ml
../shared/ParserAPI.mli ../shared/ParserAPI.mli
../shared/ParserAPI.ml ../shared/ParserAPI.ml
../shared/LexerUnit.mli ../shared/LexerUnit.mli
@ -20,10 +16,13 @@ $HOME/git/OCaml-build/Makefile
../shared/ParserUnit.mli ../shared/ParserUnit.mli
../shared/ParserUnit.ml ../shared/ParserUnit.ml
Stubs/Parser_cameligo.ml ./Stubs/Lexer_shared.ml
./Stubs/Lexer_reasonligo.ml
./Stubs/Parser_shared.ml
./Stubs/Parser_reasonligo.ml
./Stubs/Cst.ml
./Stubs/Cst_cameligo.ml
../cameligo/AST.ml $HOME/git/ligo/src/stages/1-cst/cameligo/CST.ml
../cameligo/ParserLog.mli $HOME/git/ligo/src/stages/1-cst/cameligo/ParserLog.mli
../cameligo/ParserLog.ml $HOME/git/ligo/src/stages/1-cst/cameligo/ParserLog.ml
../cameligo/Scoping.mli
../cameligo/Scoping.ml

View File

@ -21,13 +21,16 @@
aliased to [token]. aliased to [token].
*) *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module Pos = Simple_utils.Pos module Pos = Simple_utils.Pos
module Markup = Lexer_shared.Markup
type lexeme = string
(* TOKENS *) (* TOKENS *)
type lexeme = string
type t = type t =
(* Identifiers, labels, numbers and strings *) (* Identifiers, labels, numbers and strings *)

View File

@ -1,19 +1,19 @@
(* ocamlex specification for ReasonLIGO *)
{ {
(* START OF HEADER *) (* START OF HEADER *)
(* Shorthands *) (* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module Pos = Simple_utils.Pos module Pos = Simple_utils.Pos
module SMap = Utils.String.Map module Markup = Lexer_shared.Markup
module SSet = Utils.String.Set module SMap = Map.Make (String)
module SSet = Set.Make (String)
type lexeme = string
let sprintf = Printf.sprintf
(* TOKENS *) (* TOKENS *)
type lexeme = string
type t = type t =
(* Identifiers, labels, numbers and strings *) (* Identifiers, labels, numbers and strings *)
@ -103,6 +103,8 @@ type t =
(* Projections *) (* Projections *)
let sprintf = Printf.sprintf
type token = t type token = t
let proj_token = function let proj_token = function

View File

@ -1,6 +1,14 @@
(* Driver for the ReasonLIGO lexer *) (* Driver for the ReasonLIGO lexer *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module Lexer = Lexer_shared.Lexer
module LexerUnit = Lexer_shared.LexerUnit
module LexToken = Lexer_reasonligo.LexToken
(* Input/Output *)
module IO = module IO =
struct struct

View File

@ -1,4 +1,5 @@
%{ %{
module LexToken = Lexer_reasonligo.LexToken
%} %}
(* Tokens (mirroring those defined in module LexToken) *) (* Tokens (mirroring those defined in module LexToken) *)

View File

@ -1,7 +1,16 @@
(* Driver for the ReasonLIGO parser *) (* Driver for the ReasonLIGO parser *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module LexToken = Lexer_reasonligo.LexToken
module CST = Cst.Cameligo
module SSet = Set.Make (String) module SSet = Set.Make (String)
module ParserUnit = Parser_shared.ParserUnit
module Pretty = Parser_reasonligo.Pretty
(* Input/Output *)
module IO = module IO =
struct struct
@ -55,22 +64,22 @@ module SubIO =
module Parser = module Parser =
struct struct
type ast = AST.t type ast = CST.t
type expr = AST.expr type expr = CST.expr
include Parser include Parser_reasonligo.Parser
end end
module ParserLog = module ParserLog =
struct struct
type ast = AST.t type ast = CST.t
type expr = AST.expr type expr = CST.expr
include ParserLog include Cst_cameligo.ParserLog
end end
module Lexer = Lexer.Make (LexToken) module Lexer = Lexer_shared.Lexer.Make (LexToken)
module Unit = module Unit =
ParserUnit.Make (Lexer)(AST)(Parser)(Parser_msg)(ParserLog)(SubIO) ParserUnit.Make (Lexer)(CST)(Parser)(Parser_msg)(ParserLog)(SubIO)
(* Main *) (* Main *)

View File

@ -5,6 +5,7 @@ open CST
module Region = Simple_utils.Region module Region = Simple_utils.Region
open! Region open! Region
open! PPrint open! PPrint
module Option = Simple_utils.Option
let rec print ast = let rec print ast =
let app decl = group (pp_declaration decl) in let app decl = group (pp_declaration decl) in

View File

@ -0,0 +1 @@
../cameligo/Scoping.ml

View File

@ -0,0 +1 @@
../cameligo/Scoping.mli

View File

@ -0,0 +1 @@
module Cameligo = CST

View File

@ -0,0 +1 @@
module ParserLog = ParserLog

View File

@ -0,0 +1 @@
module LexToken = LexToken

View File

@ -0,0 +1,6 @@
module EvalOpt = EvalOpt
module Markup = Markup
module Lexer = Lexer
module LexerUnit = LexerUnit
module LexerLog = LexerLog
module LexerLib = LexerLib

View File

@ -1 +0,0 @@
module AST = AST

View File

@ -0,0 +1,2 @@
module Pretty = Pretty
module Parser = Parser

View File

@ -0,0 +1 @@
module ParserUnit = ParserUnit

View File

@ -1,13 +1,48 @@
;; --------------------------------------------------------------------
;; LEXING
;; Build of the lexer ;; Build of the lexer
(ocamllex LexToken) (ocamllex LexToken)
;; Build of the lexer as a library
(library
(name lexer_reasonligo)
(public_name ligo.lexer.reasonligo)
(modules LexToken)
(libraries
;; Ligo
lexer_shared
;; Third party
hex)
(preprocess
(pps bisect_ppx --conditional)))
;; Build of a standalone lexer
(executable
(name LexerMain)
(libraries
;; Ligo
lexer_shared
lexer_reasonligo
;; Third party
hex)
(modules LexerMain)
(preprocess
(pps bisect_ppx --conditional)))
;; --------------------------------------------------------------------
;; PARSING
;; Build of the parser ;; Build of the parser
(menhir (menhir
(merge_into Parser) (merge_into Parser)
(modules ParToken Parser) (modules ParToken Parser)
(flags -la 1 --table --strict --explain --external-tokens LexToken)) (flags -la 1 --table --strict --explain
--external-tokens Lexer_reasonligo.LexToken))
;; Build of the parser as a library ;; Build of the parser as a library
@ -15,16 +50,22 @@
(name parser_reasonligo) (name parser_reasonligo)
(public_name ligo.parser.reasonligo) (public_name ligo.parser.reasonligo)
(modules (modules
SyntaxError reasonligo LexToken ParErr Parser Pretty) SyntaxError Scoping Parser ParErr Pretty)
(libraries (libraries
menhirLib ;; Ligo
lexer_reasonligo
parser_shared parser_shared
parser_cameligo cst
str ;; Vendors
simple-utils) simple-utils
;; Third party
pprint
terminal_size
menhirLib
hex)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Parser_shared -open Simple_utils -open Parser_cameligo))) (flags (:standard -open Cst_cameligo))) ;; For CST in Parser.mli
;; Build of the unlexer (for covering the ;; Build of the unlexer (for covering the
;; error states of the LR automaton) ;; error states of the LR automaton)
@ -32,31 +73,24 @@
(executable (executable
(name Unlexer) (name Unlexer)
(libraries str) (libraries str)
(modules Unlexer)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional)))
(modules Unlexer))
;; Local build of a standalone lexer
(executable
(name LexerMain)
(libraries parser_reasonligo)
(modules LexerMain)
(preprocess
(pps bisect_ppx --conditional))
(flags (:standard -open Parser_shared -open Parser_reasonligo)))
;; Local build of a standalone parser ;; Local build of a standalone parser
(executable (executable
(name ParserMain) (name ParserMain)
(libraries (libraries
;; Ligo
parser_shared
parser_reasonligo parser_reasonligo
parser_cameligo) cst
(modules ParserMain) ;; Third party
hex)
(modules ParserMain Parser_msg)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional)))
(flags (:standard -open Simple_utils -open Parser_cameligo -open Parser_shared -open Parser_reasonligo)))
;; Build of the covering of error states in the LR automaton ;; Build of the covering of error states in the LR automaton
@ -83,7 +117,16 @@
(rule (rule
(targets all.religo) (targets all.religo)
(deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe) (deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe)
(action (run %{script_cover} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly --ext=religo --unlexer=./Unlexer.exe --messages=Parser.msg --dir=. --concatenate Parser.mly ))) (action (run %{script_cover}
--lex-tokens=LexToken.mli
--par-tokens=ParToken.mly
--ext=religo
--unlexer=./Unlexer.exe
--messages=Parser.msg
--dir=.
--concatenate Parser.mly)))
;; Error messages
(rule (rule
(targets error.messages) (targets error.messages)
@ -91,8 +134,7 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli) (deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action (action
(with-stdout-to %{targets} (with-stdout-to %{targets}
(run (run menhir
menhir
--unused-tokens --unused-tokens
--update-errors error.messages.checked-in --update-errors error.messages.checked-in
--table --table
@ -100,18 +142,14 @@
--external-tokens LexToken.mli --external-tokens LexToken.mli
--base Parser.mly --base Parser.mly
ParToken.mly ParToken.mly
Parser.mly Parser.mly))))
)
))
)
(rule (rule
(target error.messages.new) (target error.messages.new)
(mode (promote (until-clean) (only *))) (mode (promote (until-clean) (only *)))
(action (action
(with-stdout-to %{target} (with-stdout-to %{target}
(run (run menhir
menhir
--unused-tokens --unused-tokens
--list-errors --list-errors
--table --table
@ -119,18 +157,13 @@
--external-tokens LexToken.mli --external-tokens LexToken.mli
--base Parser.mly --base Parser.mly
ParToken.mly ParToken.mly
Parser.mly Parser.mly))))
)
)
)
)
(alias (alias
(name runtest) (name runtest)
(deps error.messages error.messages.new) (deps error.messages error.messages.new)
(action (action
(run (run menhir
menhir
--unused-tokens --unused-tokens
--table --table
--strict --strict
@ -139,12 +172,7 @@
ParToken.mly ParToken.mly
Parser.mly Parser.mly
--compare-errors error.messages.new --compare-errors error.messages.new
--compare-errors error.messages --compare-errors error.messages)))
)
)
)
(rule (rule
(targets ParErr.ml) (targets ParErr.ml)
@ -152,8 +180,7 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli) (deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action (action
(with-stdout-to %{targets} (with-stdout-to %{targets}
(run (run menhir
menhir
--unused-tokens --unused-tokens
--table --table
--strict --strict
@ -161,7 +188,4 @@
--base Parser.mly --base Parser.mly
ParToken.mly ParToken.mly
Parser.mly Parser.mly
--compile-errors error.messages.checked-in --compile-errors error.messages.checked-in))))
)
))
)

View File

@ -1,4 +0,0 @@
module Parser = Parser
module Lexer = Lexer
module LexToken = LexToken
module SyntaxError = SyntaxError

View File

@ -439,11 +439,11 @@ and scan state = parse
(* String *) (* String *)
| '"' { let opening, _, state = state#sync lexbuf in | '"' { let opening, _, state = state#sync lexbuf in
let thread = LexerLib.mk_thread opening "" in let thread = LexerLib.mk_thread opening in
scan_string thread state lexbuf |> mk_string } scan_string thread state lexbuf |> mk_string }
| "{|" { let opening, _, state = state#sync lexbuf in | "{|" { let opening, _, state = state#sync lexbuf in
let thread = LexerLib.mk_thread opening "" in let thread = LexerLib.mk_thread opening in
scan_verbatim thread state lexbuf |> mk_verbatim } scan_verbatim thread state lexbuf |> mk_verbatim }
(* Comments *) (* Comments *)
@ -453,33 +453,28 @@ and scan state = parse
match state#block with match state#block with
Some block when block#opening = lexeme -> Some block when block#opening = lexeme ->
let opening, _, state = state#sync lexbuf in let opening, _, state = state#sync lexbuf in
let thread = LexerLib.mk_thread opening lexeme in let thread = LexerLib.mk_thread opening in
let thread, state = scan_block block thread state lexbuf in let thread = thread#push_string lexeme in
let state = state#push_block thread let thread, state = scan_block block thread state lexbuf
in scan state lexbuf in scan (state#push_block thread) lexbuf
| Some _ | None -> | Some _ | None -> (* Not a comment for this LIGO syntax *)
let n = String.length lexeme in let n = String.length lexeme in
begin let () = LexerLib.rollback lexbuf in
LexerLib.rollback lexbuf; scan (scan_n_sym n state lexbuf) lexbuf }
assert (n > 0);
scan (scan_n_sym n state lexbuf) lexbuf
end }
| line_comments { | line_comments {
let lexeme = Lexing.lexeme lexbuf in let lexeme = Lexing.lexeme lexbuf in
match state#line with match state#line with
Some line when line = lexeme -> Some line when line = lexeme ->
let opening, _, state = state#sync lexbuf in let opening, _, state = state#sync lexbuf in
let thread = LexerLib.mk_thread opening lexeme in let thread = LexerLib.mk_thread opening in
let thread, state = scan_line thread state lexbuf in let thread = thread#push_string lexeme in
let state = state#push_line thread let thread, state = scan_line thread state lexbuf
in scan state lexbuf in scan (state#push_line thread) lexbuf
| Some _ | None -> | Some _ | None -> (* Not a comment for this LIGO syntax *)
let n = String.length lexeme in let n = String.length lexeme in
begin let () = LexerLib.rollback lexbuf in
LexerLib.rollback lexbuf; scan (scan_n_sym n state lexbuf) lexbuf }
scan (scan_n_sym n state lexbuf) lexbuf
end }
| _ as c { let region, _, _ = state#sync lexbuf | _ as c { let region, _, _ = state#sync lexbuf
in fail region (Unexpected_character c) } in fail region (Unexpected_character c) }
@ -488,8 +483,7 @@ and scan state = parse
and scan_n_sym n state = parse and scan_n_sym n state = parse
symbol { let state = mk_sym state lexbuf in symbol { let state = mk_sym state lexbuf in
if n = 1 then state if n = 1 then state else scan_n_sym (n-1) state lexbuf }
else scan_n_sym (n-1) state lexbuf }
(* Scanning #include flag *) (* Scanning #include flag *)
@ -552,7 +546,6 @@ and scan_block block thread state = parse
in scan_block block thread state lexbuf in scan_block block thread state lexbuf
else let () = LexerLib.rollback lexbuf in else let () = LexerLib.rollback lexbuf in
let n = String.length lexeme in let n = String.length lexeme in
let () = assert (n > 0) in
let state = scan_n_sym n state lexbuf let state = scan_n_sym n state lexbuf
in scan_block block thread state lexbuf } in scan_block block thread state lexbuf }
@ -563,7 +556,6 @@ and scan_block block thread state = parse
in thread#push_string lexeme, state in thread#push_string lexeme, state
else let () = LexerLib.rollback lexbuf in else let () = LexerLib.rollback lexbuf in
let n = String.length lexeme in let n = String.length lexeme in
let () = assert (n > 0) in
let state = scan_n_sym n state lexbuf let state = scan_n_sym n state lexbuf
in scan_block block thread state lexbuf } in scan_block block thread state lexbuf }
@ -588,6 +580,18 @@ and scan_block block thread state = parse
let region = Region.make ~start:state#pos ~stop:pos let region = Region.make ~start:state#pos ~stop:pos
in fail region error } in fail region error }
and scan_utf8 block thread state = parse
eof { let err = Unterminated_comment block#closing
in fail thread#opening err }
| _ as c { let thread = thread#push_char c in
let lexeme = Lexing.lexeme lexbuf in
let () = state#supply (Bytes.of_string lexeme) 0 1 in
match Uutf.decode state#decoder with
`Uchar _ -> thread, Stdlib.Ok ()
| `Malformed _ -> thread, Stdlib.Error Invalid_utf8_sequence
| `Await -> scan_utf8 block thread state lexbuf
| `End -> assert false }
(* Finishing a line comment *) (* Finishing a line comment *)
and scan_line thread state = parse and scan_line thread state = parse
@ -609,18 +613,6 @@ and scan_line thread state = parse
let region = Region.make ~start:state#pos ~stop:pos let region = Region.make ~start:state#pos ~stop:pos
in fail region error } in fail region error }
and scan_utf8 block thread state = parse
eof { let err = Unterminated_comment block#closing
in fail thread#opening err }
| _ as c { let thread = thread#push_char c in
let lexeme = Lexing.lexeme lexbuf in
let () = state#supply (Bytes.of_string lexeme) 0 1 in
match Uutf.decode state#decoder with
`Uchar _ -> thread, Stdlib.Ok ()
| `Malformed _ -> thread, Stdlib.Error Invalid_utf8_sequence
| `Await -> scan_utf8 block thread state lexbuf
| `End -> assert false }
and scan_utf8_inline thread state = parse and scan_utf8_inline thread state = parse
eof { thread, Stdlib.Ok () } eof { thread, Stdlib.Ok () }
| _ as c { let thread = thread#push_char c in | _ as c { let thread = thread#push_char c in

View File

@ -1,5 +1,8 @@
(* A library for writing UTF8-aware lexers *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module Pos = Simple_utils.Pos module Pos = Simple_utils.Pos
module FQueue = Simple_utils.FQueue
(* LEXER ENGINE *) (* LEXER ENGINE *)
@ -69,7 +72,7 @@ type thread = <
set_opening : Region.t -> thread set_opening : Region.t -> thread
> >
let mk_thread region lexeme : thread = let mk_thread region : thread =
(* The call [explode s a] is the list made by pushing the characters (* The call [explode s a] is the list made by pushing the characters
in the string [s] on top of [a], in reverse order. For example, in the string [s] on top of [a], in reverse order. For example,
[explode "ba" ['c';'d'] = ['a'; 'b'; 'c'; 'd']]. *) [explode "ba" ['c';'d'] = ['a'; 'b'; 'c'; 'd']]. *)
@ -83,10 +86,10 @@ let mk_thread region lexeme : thread =
val opening = region val opening = region
method opening = opening method opening = opening
val length = String.length lexeme val length = 0
method length = length method length = length
val acc = explode lexeme [] val acc = []
method acc = acc method acc = acc
method set_opening opening = {< opening; length; acc >} method set_opening opening = {< opening; length; acc >}
@ -100,10 +103,10 @@ let mk_thread region lexeme : thread =
acc = explode str acc >} acc = explode str acc >}
(* The value of [thread#to_string] is a string of length (* The value of [thread#to_string] is a string of length
[thread#length] containing the [thread#length] characters in [thread#length] containing the characters in the list
the list [thread#acc], in reverse order. For instance, [thread#acc], in reverse order. For instance, [thread#to_string
[thread#to_string = "abc"] if [thread#length = 3] and = "abc"] if [thread#length = 3] and [thread#acc =
[thread#acc = ['c';'b';'a']]. *) ['c';'b';'a']]. *)
method to_string = method to_string =
let bytes = Bytes.make length ' ' in let bytes = Bytes.make length ' ' in
@ -161,6 +164,7 @@ type 'token window =
type 'token state = < type 'token state = <
units : (Markup.t list * 'token) FQueue.t; units : (Markup.t list * 'token) FQueue.t;
markup : Markup.t list; markup : Markup.t list;
comments : Markup.comment FQueue.t;
window : 'token window; window : 'token window;
last : Region.t; last : Region.t;
pos : Pos.t; pos : Pos.t;
@ -184,15 +188,18 @@ type 'token state = <
push_tabs : Lexing.lexbuf -> 'token state; push_tabs : Lexing.lexbuf -> 'token state;
push_bom : Lexing.lexbuf -> 'token state; push_bom : Lexing.lexbuf -> 'token state;
push_markup : Markup.t -> 'token state; push_markup : Markup.t -> 'token state;
push_comment : Markup.comment -> 'token state
> >
let mk_state ~units ~markup ~window ~last ~pos ~decoder ~supply let mk_state ~units ~markup ~comments ~window ~last ~pos ~decoder ~supply
?block ?line () : _ state = ?block ?line () : _ state =
object (self) object (self)
val units = units val units = units
method units = units method units = units
val markup = markup val markup = markup
method markup = markup method markup = markup
val comments = comments
method comments = comments
val window = window val window = window
method window = window method window = window
val last = last val last = last
@ -229,6 +236,9 @@ let mk_state ~units ~markup ~window ~last ~pos ~decoder ~supply
(* Committing markup to the current logical state *) (* Committing markup to the current logical state *)
method push_comment comment =
{< comments = FQueue.enq comment comments >}
method push_markup unit = {< markup = unit :: markup >} method push_markup unit = {< markup = unit :: markup >}
method push_newline buffer = method push_newline buffer =
@ -238,21 +248,23 @@ let mk_state ~units ~markup ~window ~last ~pos ~decoder ~supply
let stop = start#new_line value in let stop = start#new_line value in
let region = Region.make ~start ~stop in let region = Region.make ~start ~stop in
let unit = Markup.Newline Region.{region; value} let unit = Markup.Newline Region.{region; value}
in {< pos = stop; markup = unit::markup >} in (self#push_markup unit)#set_pos stop
method push_line thread = method push_line thread =
let start = thread#opening#start in let start = thread#opening#start in
let region = Region.make ~start ~stop:self#pos let region = Region.make ~start ~stop:self#pos
and value = thread#to_string in and value = thread#to_string in
let unit = Markup.LineCom Region.{region; value} let reg = Region.{region; value} in
in {< markup = unit::markup >} let unit = Markup.LineCom reg
in (self#push_markup unit)#push_comment (Markup.Line reg)
method push_block thread = method push_block thread =
let start = thread#opening#start in let start = thread#opening#start in
let region = Region.make ~start ~stop:self#pos let region = Region.make ~start ~stop:self#pos
and value = thread#to_string in and value = thread#to_string in
let unit = Markup.BlockCom Region.{region; value} let reg = Region.{region; value} in
in {< markup = unit::markup >} let unit = Markup.BlockCom reg
in (self#push_markup unit)#push_comment (Markup.Block reg)
method push_space buffer = method push_space buffer =
let region, lex, state = self#sync buffer in let region, lex, state = self#sync buffer in
@ -286,11 +298,12 @@ type 'token instance = {
input : input; input : input;
read : log:('token logger) -> Lexing.lexbuf -> 'token; read : log:('token logger) -> Lexing.lexbuf -> 'token;
buffer : Lexing.lexbuf; buffer : Lexing.lexbuf;
close : unit -> unit;
get_win : unit -> 'token window; get_win : unit -> 'token window;
get_pos : unit -> Pos.t; get_pos : unit -> Pos.t;
get_last : unit -> Region.t; get_last : unit -> Region.t;
get_file : unit -> file_path; get_file : unit -> file_path;
close : unit -> unit get_comments : unit -> Markup.comment FQueue.t
} }
type open_err = File_opening of string type open_err = File_opening of string
@ -329,14 +342,17 @@ let open_token_stream ?line ?block ~scan
~window:Nil ~window:Nil
~pos ~pos
~markup:[] ~markup:[]
~comments:FQueue.empty
~decoder ~decoder
~supply ~supply
?block ?block
?line ?line
()) in ()) in
let get_pos () = !state#pos let get_pos () = !state#pos
and get_last () = !state#last and get_last () = !state#last
and get_win () = !state#window and get_win () = !state#window
and get_comments () = !state#comments
and get_file () = file_path in and get_file () = file_path in
let patch_buffer (start, stop) buffer = let patch_buffer (start, stop) buffer =
@ -382,6 +398,7 @@ let open_token_stream ?line ?block ~scan
| _ -> () in | _ -> () in
let instance = { let instance = {
read = read scan ~token_to_region ~style; read = read scan ~token_to_region ~style;
input; buffer; get_win; get_pos; get_last; get_file; close} input; buffer; close;
get_win; get_pos; get_last; get_file; get_comments}
in Ok instance in Ok instance
| Error _ as e -> e | Error _ as e -> e

View File

@ -2,6 +2,7 @@
module Region = Simple_utils.Region module Region = Simple_utils.Region
module Pos = Simple_utils.Pos module Pos = Simple_utils.Pos
module FQueue = Simple_utils.FQueue
(* The function [rollback] resets the lexing buffer to the state it (* The function [rollback] resets the lexing buffer to the state it
was when it matched the last regular expression. This function is was when it matched the last regular expression. This function is
@ -39,7 +40,7 @@ type thread = <
set_opening : Region.t -> thread set_opening : Region.t -> thread
> >
val mk_thread : Region.t -> lexeme -> thread val mk_thread : Region.t -> thread
(* STATE *) (* STATE *)
@ -110,6 +111,7 @@ type 'token window =
type 'token state = < type 'token state = <
units : (Markup.t list * 'token) FQueue.t; units : (Markup.t list * 'token) FQueue.t;
markup : Markup.t list; markup : Markup.t list;
comments : Markup.comment FQueue.t;
window : 'token window; window : 'token window;
last : Region.t; last : Region.t;
pos : Pos.t; pos : Pos.t;
@ -133,6 +135,7 @@ type 'token state = <
push_tabs : Lexing.lexbuf -> 'token state; push_tabs : Lexing.lexbuf -> 'token state;
push_bom : Lexing.lexbuf -> 'token state; push_bom : Lexing.lexbuf -> 'token state;
push_markup : Markup.t -> 'token state; push_markup : Markup.t -> 'token state;
push_comment : Markup.comment -> 'token state
> >
(* LEXER INSTANCE *) (* LEXER INSTANCE *)
@ -178,11 +181,12 @@ type 'token instance = {
input : input; input : input;
read : log:('token logger) -> Lexing.lexbuf -> 'token; read : log:('token logger) -> Lexing.lexbuf -> 'token;
buffer : Lexing.lexbuf; buffer : Lexing.lexbuf;
close : unit -> unit;
get_win : unit -> 'token window; get_win : unit -> 'token window;
get_pos : unit -> Pos.t; get_pos : unit -> Pos.t;
get_last : unit -> Region.t; get_last : unit -> Region.t;
get_file : unit -> file_path; get_file : unit -> file_path;
close : unit -> unit get_comments : unit -> Markup.comment FQueue.t
} }
type open_err = File_opening of string type open_err = File_opening of string

View File

@ -42,3 +42,9 @@ let to_string markup ?(offsets=true) mode =
region, sprintf "BOM \"%s\"" (String.escaped value) in region, sprintf "BOM \"%s\"" (String.escaped value) in
let reg_str = region#compact ~offsets mode let reg_str = region#compact ~offsets mode
in sprintf "%s: %s" reg_str val_str in sprintf "%s: %s" reg_str val_str
(* Comments *)
type comment =
Line of lexeme Region.reg
| Block of lexeme Region.reg

View File

@ -1,11 +1,12 @@
(** This module defines the sorts of markup recognised by the LIGO (* This module defines the sorts of markup recognised by the LIGO
lexer *) lexer *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
(** A lexeme is piece of concrete syntax belonging to a token. In (* A lexeme is piece of concrete syntax belonging to a token. In
algebraic terms, a token is also a piece of abstract lexical algebraic terms, a token is also a piece of abstract lexical
syntax. Lexical units emcompass both markup and lexemes. *) syntax. Lexical units emcompass both markup and lexemes. *)
type lexeme = string type lexeme = string
type t = type t =
@ -18,7 +19,7 @@ type t =
type markup = t type markup = t
(** Pretty-printing of markup (* Pretty-printing of markup
The difference between [to_lexeme] and [to_string] is that the The difference between [to_lexeme] and [to_string] is that the
former builds the corresponding concrete syntax (the lexeme), former builds the corresponding concrete syntax (the lexeme),
@ -31,3 +32,9 @@ type markup = t
val to_lexeme : t -> lexeme val to_lexeme : t -> lexeme
val to_string : t -> ?offsets:bool -> [`Byte | `Point] -> string val to_string : t -> ?offsets:bool -> [`Byte | `Point] -> string
(* Comments *)
type comment =
Line of lexeme Region.reg
| Block of lexeme Region.reg

View File

@ -1,6 +1,14 @@
(* Generic parser for LIGO *) (* Generic parser for LIGO *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module Lexer = Lexer_shared.Lexer
module LexerLib = Lexer_shared.LexerLib
module LexerLog = Lexer_shared.LexerLog
(* Input/Output *)
type options = < type options = <
offsets : bool; offsets : bool;

View File

@ -1,6 +1,13 @@
(* Generic parser API for LIGO *) (* Generic parser API for LIGO *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module Lexer = Lexer_shared.Lexer
module LexerLib = Lexer_shared.LexerLib
(* Input/Output *)
type options = < type options = <
offsets : bool; offsets : bool;

View File

@ -1,9 +1,17 @@
(* Functor to build a LIGO parser *) (* Functor to build a LIGO parser *)
(* Dependencies *)
module Region = Simple_utils.Region module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module Lexer = Lexer_shared.Lexer
module LexerLib = Lexer_shared.LexerLib
module LexerLog = Lexer_shared.LexerLog
module Preproc = Preprocessor.Preproc module Preproc = Preprocessor.Preproc
module SSet = Set.Make (String) module SSet = Set.Make (String)
(* A subtype of [EvalOpt.options] *)
module type SubIO = module type SubIO =
sig sig
type options = < type options = <

View File

@ -1,8 +1,12 @@
(* Functor to build a standalone LIGO parser *) (* Functor to build a standalone LIGO parser *)
module Region = Simple_utils.Region (* Dependencies *)
module SSet : Set.S with type elt = string and type t = Set.Make(String).t module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module Lexer = Lexer_shared.Lexer
module SSet : Set.S with type elt = string
and type t = Set.Make(String).t
(* A subtype of [EvalOpt.options] *) (* A subtype of [EvalOpt.options] *)

View File

@ -1,29 +1,51 @@
;; Build of the lexer in common to all Ligo's dialects
(ocamllex Lexer) (ocamllex Lexer)
;; Build of the lexer as a library
(library
(name lexer_shared)
(public_name ligo.lexer.shared)
(libraries
;; Ligo
simple-utils
;; Vendors
Preprocessor
;; Third party
uutf
getopt
zarith)
(modules
LexerLib
LexerUnit
Lexer
LexerLog
Markup
EvalOpt
Version)
(preprocess
(pps bisect_ppx --conditional)))
;; Build of the parser as a library
(library (library
(name parser_shared) (name parser_shared)
(public_name ligo.parser.shared) (public_name ligo.parser.shared)
(libraries (libraries
menhirLib ;; Ligo
lexer_shared
simple-utils simple-utils
uutf ;; Third party
getopt menhirLib)
zarith
Preprocessor)
(preprocess
(pps bisect_ppx --conditional))
(modules (modules
LexerLib
LexerUnit
ParserUnit ParserUnit
ParserAPI ParserAPI)
Lexer (preprocess
LexerLog (pps bisect_ppx --conditional)))
Markup ;; (flags (:standard -open Lexer_shared)))
Utils
FQueue ;; Build of the version source (for the user, as a CLI option)
EvalOpt
Version))
(rule (rule
(targets Version.ml) (targets Version.ml)

View File

@ -409,7 +409,7 @@ and decompile_attributes = function
true -> [wrap "inline"] true -> [wrap "inline"]
| false -> [] | false -> []
and decompile_matching_cases : AST.matching_expr -> ((CST.expr CST.case_clause Region.reg, Region.t) Parser_shared.Utils.nsepseq Region.reg,_) result = and decompile_matching_cases : AST.matching_expr -> ((CST.expr CST.case_clause Region.reg, Region.t) Simple_utils.Utils.nsepseq Region.reg,_) result =
fun m -> fun m ->
let%bind cases = match m with let%bind cases = match m with
Match_variable (var, ty_opt, expr) -> Match_variable (var, ty_opt, expr) ->

View File

@ -584,7 +584,6 @@ and compile_instruction : ?next: AST.expression -> CST.instruction -> _ result
let%bind (_, var, path) = compile_path mlu.path in let%bind (_, var, path) = compile_path mlu.path in
let%bind index = compile_expression @@ mlu.index.value.inside in let%bind index = compile_expression @@ mlu.index.value.inside in
ok @@ (var, path @ [Access_map index]) ok @@ (var, path @ [Access_map index])
in in
match instruction with match instruction with
Cond c -> Cond c ->
@ -747,8 +746,8 @@ and compile_block : ?next:AST.expression -> CST.block CST.reg -> _ result = fun
let (block', _loc) = r_split block in let (block', _loc) = r_split block in
let statements = npseq_to_list block'.statements in let statements = npseq_to_list block'.statements in
let aux (next,attr) statement = let aux (next,attr) statement =
let%bind (statement, attr) = compile_statement ?next attr statement in let%bind (statement, attr) = compile_statement ?next attr statement
return (statement,attr) in return (statement,attr)
in in
let%bind (block', _) = bind_fold_right_list aux (next,None) statements in let%bind (block', _) = bind_fold_right_list aux (next,None) statements in
match block' with match block' with

View File

@ -570,7 +570,7 @@ and decompile_lambda : AST.lambda -> _ = fun {binder;input_type;output_type;resu
let%bind return = decompile_expression result in let%bind return = decompile_expression result in
ok @@ (param,ret_type,return) ok @@ (param,ret_type,return)
and decompile_matching_expr : type a.(AST.expr ->(a,_) result) -> AST.matching_expr -> ((a CST.case_clause Region.reg, Region.t) Parser_shared.Utils.nsepseq Region.reg,_) result = and decompile_matching_expr : type a.(AST.expr ->(a,_) result) -> AST.matching_expr -> ((a CST.case_clause Region.reg, Region.t) Simple_utils.Utils.nsepseq Region.reg,_) result =
fun f m -> fun f m ->
let%bind cases = match m with let%bind cases = match m with
Match_variable (var, _ty_opt, expr) -> Match_variable (var, _ty_opt, expr) ->

View File

@ -15,7 +15,7 @@ type abs_error = [
| `Concrete_pascaligo_unsupported_string_singleton of Raw.type_expr | `Concrete_pascaligo_unsupported_string_singleton of Raw.type_expr
| `Concrete_pascaligo_unsupported_deep_some_pattern of Raw.pattern | `Concrete_pascaligo_unsupported_deep_some_pattern of Raw.pattern
| `Concrete_pascaligo_unsupported_deep_list_pattern of Raw.pattern | `Concrete_pascaligo_unsupported_deep_list_pattern of Raw.pattern
| `Concrete_pascaligo_unsupported_deep_tuple_pattern of (Raw.pattern, Raw.wild) Parser_shared.Utils.nsepseq Raw.par Raw.reg | `Concrete_pascaligo_unsupported_deep_tuple_pattern of (Raw.pattern, Raw.wild) Simple_utils.Utils.nsepseq Raw.par Raw.reg
| `Concrete_pascaligo_unknown_built_in of string | `Concrete_pascaligo_unknown_built_in of string
| `Concrete_pascaligo_michelson_type_wrong of Raw.type_expr * string | `Concrete_pascaligo_michelson_type_wrong of Raw.type_expr * string
| `Concrete_pascaligo_michelson_type_wrong_arity of Location.t * string | `Concrete_pascaligo_michelson_type_wrong_arity of Location.t * string

View File

@ -6,6 +6,7 @@
(* Utilities *) (* Utilities *)
module Utils = Simple_utils.Utils
open Utils open Utils
(* Regions (* Regions
@ -23,6 +24,10 @@ module Region = Simple_utils.Region
type 'a reg = 'a Region.reg type 'a reg = 'a Region.reg
(* Lexemes *)
type lexeme = string
(* Keywords of OCaml *) (* Keywords of OCaml *)
type keyword = Region.t type keyword = Region.t
@ -169,7 +174,7 @@ and type_expr =
| TFun of (type_expr * arrow * type_expr) reg | TFun of (type_expr * arrow * type_expr) reg
| TPar of type_expr par reg | TPar of type_expr par reg
| TVar of variable | TVar of variable
| TString of Lexer.lexeme reg | TString of lexeme reg
and cartesian = (type_expr, times) nsepseq reg and cartesian = (type_expr, times) nsepseq reg
@ -192,9 +197,9 @@ and pattern =
| PFalse of kwd_false | PFalse of kwd_false
| PTrue of kwd_true | PTrue of kwd_true
| PVar of variable | PVar of variable
| PInt of (Lexer.lexeme * Z.t) reg | PInt of (lexeme * Z.t) reg
| PNat of (Lexer.lexeme * Z.t) reg | PNat of (lexeme * Z.t) reg
| PBytes of (Lexer.lexeme * Hex.t) reg | PBytes of (lexeme * Hex.t) reg
| PString of string reg | PString of string reg
| PVerbatim of string reg | PVerbatim of string reg
| PWild of wild | PWild of wild

View File

@ -1 +0,0 @@
include CST

View File

@ -2,12 +2,11 @@
(name cst_cameligo) (name cst_cameligo)
(public_name ligo.cst.cameligo) (public_name ligo.cst.cameligo)
(libraries (libraries
;; Ligo
simple-utils simple-utils
tezos-utils ;; Third party
parser_shared hex
) zarith)
(modules CST ParserLog)
(preprocess (preprocess
(pps ppx_let bisect_ppx --conditional) (pps ppx_let bisect_ppx --conditional)))
)
(flags (:standard -open Parser_shared -open Simple_utils ))
)

View File

@ -2,13 +2,7 @@
(name cst) (name cst)
(public_name ligo.cst) (public_name ligo.cst)
(libraries (libraries
simple-utils
tezos-utils
cst_cameligo cst_cameligo
cst_pascaligo cst_pascaligo)
)
(preprocess (preprocess
(pps ppx_let bisect_ppx --conditional) (pps ppx_let bisect_ppx --conditional)))
)
(flags (:standard -open Simple_utils ))
)

View File

@ -6,6 +6,7 @@
(* Utilities *) (* Utilities *)
module Utils = Simple_utils.Utils
open Utils open Utils
(* Regions (* Regions
@ -23,6 +24,10 @@ module Region = Simple_utils.Region
type 'a reg = 'a Region.reg type 'a reg = 'a Region.reg
(* Lexemes *)
type lexeme = string
(* Keywords of LIGO *) (* Keywords of LIGO *)
type keyword = Region.t type keyword = Region.t
@ -185,7 +190,7 @@ and type_expr =
| TFun of (type_expr * arrow * type_expr) reg | TFun of (type_expr * arrow * type_expr) reg
| TPar of type_expr par reg | TPar of type_expr par reg
| TVar of variable | TVar of variable
| TString of Lexer.lexeme reg | TString of lexeme reg
and cartesian = (type_expr, times) nsepseq reg and cartesian = (type_expr, times) nsepseq reg
@ -227,7 +232,7 @@ and fun_decl = {
and block_with = { and block_with = {
block : block reg; block : block reg;
kwd_with : kwd_with; kwd_with : kwd_with;
expr : expr; expr : expr
} }
and parameters = (param_decl, semi) nsepseq par reg and parameters = (param_decl, semi) nsepseq par reg
@ -460,9 +465,9 @@ and expr =
| EProj of projection reg | EProj of projection reg
| EUpdate of update reg | EUpdate of update reg
| EMap of map_expr | EMap of map_expr
| EVar of Lexer.lexeme reg | EVar of lexeme reg
| ECall of fun_call | ECall of fun_call
| EBytes of (Lexer.lexeme * Hex.t) reg | EBytes of (lexeme * Hex.t) reg
| EUnit of c_Unit | EUnit of c_Unit
| ETuple of tuple_expr | ETuple of tuple_expr
| EPar of expr par reg | EPar of expr par reg
@ -527,14 +532,14 @@ and arith_expr =
| Div of slash bin_op reg | Div of slash bin_op reg
| Mod of kwd_mod bin_op reg | Mod of kwd_mod bin_op reg
| Neg of minus un_op reg | Neg of minus un_op reg
| Int of (Lexer.lexeme * Z.t) reg | Int of (lexeme * Z.t) reg
| Nat of (Lexer.lexeme * Z.t) reg | Nat of (lexeme * Z.t) reg
| Mutez of (Lexer.lexeme * Z.t) reg | Mutez of (lexeme * Z.t) reg
and string_expr = and string_expr =
Cat of cat bin_op reg Cat of cat bin_op reg
| String of Lexer.lexeme reg | String of lexeme reg
| Verbatim of Lexer.lexeme reg | Verbatim of lexeme reg
and list_expr = and list_expr =
ECons of cons bin_op reg ECons of cons bin_op reg
@ -574,7 +579,7 @@ and field_path_assignment = {
and selection = and selection =
FieldName of field_name FieldName of field_name
| Component of (Lexer.lexeme * Z.t) reg | Component of (lexeme * Z.t) reg
and tuple_expr = (expr, comma) nsepseq par reg and tuple_expr = (expr, comma) nsepseq par reg
@ -618,12 +623,12 @@ and ne_injection_kwd =
and pattern = and pattern =
PConstr of constr_pattern PConstr of constr_pattern
| PVar of Lexer.lexeme reg | PVar of lexeme reg
| PWild of wild | PWild of wild
| PInt of (Lexer.lexeme * Z.t) reg | PInt of (lexeme * Z.t) reg
| PNat of (Lexer.lexeme * Z.t) reg | PNat of (lexeme * Z.t) reg
| PBytes of (Lexer.lexeme * Hex.t) reg | PBytes of (lexeme * Hex.t) reg
| PString of Lexer.lexeme reg | PString of lexeme reg
| PList of list_pattern | PList of list_pattern
| PTuple of tuple_pattern | PTuple of tuple_pattern

View File

@ -141,7 +141,7 @@ and print_const_decl state {value; _} =
equal; init; terminator; _} = value in equal; init; terminator; _} = value in
print_token state kwd_const "const"; print_token state kwd_const "const";
print_var state name; print_var state name;
print_option state print_colon_type_expr const_type; print_option state print_type_annot const_type;
print_token state equal "="; print_token state equal "=";
print_expr state init; print_expr state init;
print_terminator state terminator print_terminator state terminator
@ -165,7 +165,7 @@ and print_type_expr state = function
| TVar type_var -> print_var state type_var | TVar type_var -> print_var state type_var
| TString str -> print_string state str | TString str -> print_string state str
and print_colon_type_expr state (colon, type_expr) = and print_type_annot state (colon, type_expr) =
print_token state colon ":"; print_token state colon ":";
print_type_expr state type_expr; print_type_expr state type_expr;
@ -223,7 +223,7 @@ and print_fun_decl state {value; _} =
print_token state kwd_function "function"; print_token state kwd_function "function";
print_var state fun_name; print_var state fun_name;
print_parameters state param; print_parameters state param;
print_option state print_colon_type_expr ret_type; print_option state print_type_annot ret_type;
print_token state kwd_is "is"; print_token state kwd_is "is";
print_expr state return; print_expr state return;
print_terminator state terminator; print_terminator state terminator;
@ -233,7 +233,7 @@ and print_fun_expr state {value; _} =
ret_type; kwd_is; return} : fun_expr = value in ret_type; kwd_is; return} : fun_expr = value in
print_token state kwd_function "function"; print_token state kwd_function "function";
print_parameters state param; print_parameters state param;
print_option state print_colon_type_expr ret_type; print_option state print_type_annot ret_type;
print_token state kwd_is "is"; print_token state kwd_is "is";
print_expr state return print_expr state return
@ -267,13 +267,13 @@ and print_param_const state {value; _} =
let {kwd_const; var; param_type} = value in let {kwd_const; var; param_type} = value in
print_token state kwd_const "const"; print_token state kwd_const "const";
print_var state var; print_var state var;
print_option state print_colon_type_expr param_type print_option state print_type_annot param_type
and print_param_var state {value; _} = and print_param_var state {value; _} =
let {kwd_var; var; param_type} = value in let {kwd_var; var; param_type} = value in
print_token state kwd_var "var"; print_token state kwd_var "var";
print_var state var; print_var state var;
print_option state print_colon_type_expr param_type print_option state print_type_annot param_type
and print_block state block = and print_block state block =
let {enclosing; statements; terminator} = block.value in let {enclosing; statements; terminator} = block.value in
@ -300,7 +300,7 @@ and print_var_decl state {value; _} =
assign; init; terminator} = value in assign; init; terminator} = value in
print_token state kwd_var "var"; print_token state kwd_var "var";
print_var state name; print_var state name;
print_option state print_colon_type_expr var_type; print_option state print_type_annot var_type;
print_token state assign ":="; print_token state assign ":=";
print_expr state init; print_expr state init;
print_terminator state terminator print_terminator state terminator
@ -919,35 +919,49 @@ and pp_declaration state = function
and pp_attr_decl state = pp_ne_injection pp_string state and pp_attr_decl state = pp_ne_injection pp_string state
and pp_fun_decl state decl = and pp_fun_decl state decl =
let arity, start = let kwd_recursive = if decl.kwd_recursive = None then 0 else 1 in
let ret_type = if decl.ret_type = None then 0 else 1 in
let arity = kwd_recursive + ret_type + 3 in
let index = 0 in
let index =
match decl.kwd_recursive with match decl.kwd_recursive with
None -> 4,0 None -> index
| Some _ -> | Some _ -> let state = state#pad arity index in
let state = state#pad 5 0 in pp_node state "recursive";
let () = pp_node state "recursive" index + 1 in
in 5,1 in let index =
let () = let state = state#pad arity index in
let state = state#pad arity start in pp_ident state decl.fun_name;
pp_ident state decl.fun_name in index + 1 in
let () = let index =
let state = state#pad arity (start + 1) in let state = state#pad arity index in
pp_node state "<parameters>"; pp_node state "<parameters>";
pp_parameters state decl.param in pp_parameters state decl.param;
let () = index + 1 in
let state = state#pad arity (start + 2) in let index =
match decl.ret_type with
None -> index
| Some (_, t_expr) ->
let state = state#pad arity index in
pp_node state "<return type>"; pp_node state "<return type>";
print_option (state#pad 1 0) pp_type_expr @@ Option.map snd decl.ret_type in pp_type_expr (state#pad 1 0) t_expr;
index+1 in
let () = let () =
let state = state#pad arity (start + 3) in let state = state#pad arity index in
pp_node state "<return>"; pp_node state "<return>";
pp_expr (state#pad 1 0) decl.return pp_expr (state#pad 1 0) decl.return
in () in ()
and pp_const_decl state decl = and pp_const_decl state decl =
let arity = 3 in let arity = if decl.const_type = None then 2 else 3 in
pp_ident (state#pad arity 0) decl.name; let index = 0 in
print_option (state#pad arity 1) pp_type_expr @@ Option.map snd decl.const_type; let index =
pp_expr (state#pad arity 2) decl.init pp_ident (state#pad arity 0) decl.name; index+1 in
let index =
pp_type_annot (state#pad arity index) index decl.const_type in
let () =
pp_expr (state#pad arity index) decl.init
in ()
and pp_type_expr state = function and pp_type_expr state = function
TProd cartesian -> TProd cartesian ->
@ -1008,43 +1022,49 @@ and pp_type_tuple state {value; _} =
in List.iteri (List.length components |> apply) components in List.iteri (List.length components |> apply) components
and pp_fun_expr state (expr: fun_expr) = and pp_fun_expr state (expr: fun_expr) =
let () = let arity = if expr.ret_type = None then 2 else 3 in
let state = state#pad 3 0 in let index = 0 in
let index =
let state = state#pad arity index in
pp_node state "<parameters>"; pp_node state "<parameters>";
pp_parameters state expr.param in pp_parameters state expr.param;
let () = index + 1 in
let state = state#pad 3 1 in let index =
match expr.ret_type with
None -> index
| Some (_, t_expr) ->
let state = state#pad arity index in
pp_node state "<return type>"; pp_node state "<return type>";
print_option (state#pad 1 0) pp_type_expr @@ Option.map snd expr.ret_type in pp_type_expr (state#pad 1 0) t_expr;
index + 1 in
let () = let () =
let state = state#pad 3 2 in let state = state#pad arity index in
pp_node state "<return>"; pp_node state "<return>";
pp_expr (state#pad 1 0) expr.return pp_expr (state#pad 1 0) expr.return
in () in ()
and pp_code_inj state rc = and pp_code_inj state node =
let () = let () =
let state = state#pad 2 0 in let state = state#pad 2 0 in
pp_node state "<language>"; pp_node state "<language>";
pp_string (state#pad 1 0) rc.language.value in pp_string (state#pad 1 0) node.language.value in
let () = let () =
let state = state#pad 2 1 in let state = state#pad 2 1 in
pp_node state "<code>"; pp_node state "<code>";
pp_expr (state#pad 1 0) rc.code pp_expr (state#pad 1 0) node.code
in () in ()
and pp_block_expr state (bw : block_with) = and pp_block_expr state node =
let {block;expr;_}:CST.block_with = bw in let {block; expr; _} : block_with = node in
let () = let () =
let state = state#pad 2 0 in let state = state#pad 2 0 in
pp_node state "<block>"; pp_node state "<block>";
pp_statements state block.value.statements pp_statements state block.value.statements in
in
let () = let () =
let state = state#pad 2 1 in let state = state#pad 2 1 in
pp_node state "<expr>"; pp_node state "<expr>";
pp_expr (state#pad 1 0) expr in pp_expr (state#pad 1 0) expr
() in ()
and pp_parameters state {value; _} = and pp_parameters state {value; _} =
let params = Utils.nsepseq_to_list value.inside in let params = Utils.nsepseq_to_list value.inside in
@ -1054,13 +1074,15 @@ and pp_parameters state {value; _} =
and pp_param_decl state = function and pp_param_decl state = function
ParamConst {value; region} -> ParamConst {value; region} ->
let arity = if value.param_type = None then 1 else 2 in
pp_loc_node state "ParamConst" region; pp_loc_node state "ParamConst" region;
pp_ident (state#pad 2 0) value.var; pp_ident (state#pad arity 0) value.var;
print_option (state#pad 2 1) pp_type_expr @@ Option.map snd value.param_type ignore (pp_type_annot (state#pad arity 1) 1 value.param_type)
| ParamVar {value; region} -> | ParamVar {value; region} ->
let arity = if value.param_type = None then 1 else 2 in
pp_loc_node state "ParamVar" region; pp_loc_node state "ParamVar" region;
pp_ident (state#pad 2 0) value.var; pp_ident (state#pad 2 0) value.var;
print_option (state#pad 2 1) pp_type_expr @@ Option.map snd value.param_type ignore (pp_type_annot (state#pad arity 1) 1 value.param_type)
and pp_statements state statements = and pp_statements state statements =
let statements = Utils.nsepseq_to_list statements in let statements = Utils.nsepseq_to_list statements in
@ -1461,9 +1483,11 @@ and pp_data_decl state = function
pp_fun_decl state value pp_fun_decl state value
and pp_var_decl state decl = and pp_var_decl state decl =
pp_ident (state#pad 3 0) decl.name; let arity = if decl.var_type = None then 2 else 3 in
print_option (state#pad 3 1) pp_type_expr @@ Option.map snd decl.var_type; let index = 0 in
pp_expr (state#pad 3 2) decl.init let index = pp_ident (state#pad arity index) decl.name; index+1 in
let index = pp_type_annot (state#pad arity index) index decl.var_type
in pp_expr (state#pad arity index) decl.init
and pp_expr state = function and pp_expr state = function
ECase {value; region} -> ECase {value; region} ->
@ -1663,3 +1687,7 @@ and pp_bin_op node region state op =
pp_loc_node state node region; pp_loc_node state node region;
pp_expr (state#pad 2 0) op.arg1; pp_expr (state#pad 2 0) op.arg1;
pp_expr (state#pad 2 1) op.arg2 pp_expr (state#pad 2 1) op.arg2
and pp_type_annot state index = function
None -> index
| Some (_, e) -> pp_type_expr state e; index+1

View File

@ -2,12 +2,11 @@
(name cst_pascaligo) (name cst_pascaligo)
(public_name ligo.cst.pascaligo) (public_name ligo.cst.pascaligo)
(libraries (libraries
;; Ligo
simple-utils simple-utils
tezos-utils ;; Third party
parser_shared hex
) zarith)
(modules CST ParserLog)
(preprocess (preprocess
(pps ppx_let bisect_ppx --conditional) (pps ppx_let bisect_ppx --conditional)))
)
(flags (:standard -open Parser_shared -open Simple_utils ))
)

View File

@ -1 +0,0 @@
include CST

View File

@ -7,9 +7,15 @@
(libraries (libraries
getopt getopt
simple-utils) simple-utils)
(modules EvalOpt E_Parser E_Lexer E_AST Preproc) (modules
EvalOpt
E_Parser
E_Lexer
E_AST
Preproc)
(preprocess (preprocess
(pps bisect_ppx --conditional))) (pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils)))
;; Building the lexers of the preprocessor ;; Building the lexers of the preprocessor

View File

@ -4,9 +4,6 @@
(libraries (libraries
yojson yojson
unix unix
str str)
)
(preprocess (preprocess
(pps ppx_let) (pps ppx_let)))
)
)

View File

@ -3,20 +3,16 @@ name : "simple-utils"
version : "dev" version : "dev"
synopsis : "LIGO utilities, to be used by other libraries" synopsis : "LIGO utilities, to be used by other libraries"
maintainer : "Galfour <contact@ligolang.org>" maintainer : "Galfour <contact@ligolang.org>"
authors : "Galfour" authors : "Galfour, Christian Rinderknecht"
license : "MIT" license : "MIT"
homepage : "https://gitlab.com/ligolang/ligo-utils" homepage : "https://gitlab.com/ligolang/ligo-utils"
bug-reports : "https://gitlab.com/ligolang/ligo-utils/issues" bug-reports : "https://gitlab.com/ligolang/ligo-utils/issues"
depends: [ depends : ["dune"
"dune"
"base" "base"
"yojson" "yojson"
"ppx_let" "ppx_let"
# from ppx_let: # from ppx_let:
"ocaml" {>= "4.04.2" & < "4.08.0"} "ocaml" {>= "4.04.2" & < "4.08.0"}
"dune" {build & >= "1.5.1"} "dune" {build & >= "1.5.1"}
"ppxlib" {>= "0.5.0"} "ppxlib" {>= "0.5.0"}]
] build : [["dune" "build" "-p" name]]
build: [
["dune" "build" "-p" name]
]

View File

@ -10,10 +10,14 @@ module Int = X_int
module Tuple = Tuple module Tuple = Tuple
module Map = X_map module Map = X_map
module Tree = Tree module Tree = Tree
module Region = Region
module Pos = Pos
module Var = Var module Var = Var
module Ligo_string = X_string module Ligo_string = X_string
module Display = Display module Display = Display
module Runned_result = Runned_result module Runned_result = Runned_result
(* Originally by Christian Rinderknecht *)
module Pos = Pos
module Region = Region
module Utils = Utils
module FQueue = FQueue