Merge branch 'rinderknecht@pprint_comments' into 'dev'

Refactoring of the build of the front-end

See merge request ligolang/ligo!698

Commit 3656d9e1ff
@@ -676,26 +676,26 @@ let%expect_test _ =
type entrypointReturn is list (operation) * storage

const errorTokenUndefined = "TOKEN_UNDEFINED"

const errorNotOwner = "NOT_OWNER"

const errorInsufficientBalance = "INSUFFICIENT_BALANCE"

type transferContentsIteratorAccumulator is
storage * tokenOwner

function transferContentsIterator
-(const gen__P
-: transferContentsIteratorAccumulator *
+(const gen__P :
+transferContentsIteratorAccumulator *
transferContentsMichelson) is
block {
const gen__rhs1 = gen__P;
const accumulator = gen__rhs1.0;
const transferContentsMichelson = gen__rhs1.1;
const gen__rhs2 = accumulator;
const storage = gen__rhs2.0;
const from_ = gen__rhs2.1;
const transferContents
= (Layout.convert_from_right_comb
(transferContentsMichelson)
@@ -726,37 +726,37 @@ let%expect_test _ =
function transferIterator
(const gen__P : storage * transferMichelson) is
block {
const gen__rhs7 = gen__P;
const storage = gen__rhs7.0;
const transferMichelson = gen__rhs7.1;
const transferAuxiliary2
= (Layout.convert_from_right_comb (transferMichelson)
: transferAuxiliary);
const from_ = (transferAuxiliary2.from_ : tokenOwner);
allowOnlyOwnTransfer (from_);
const gen__rhs10
= List.fold
(transferContentsIterator, transferAuxiliary2.txs,
(storage, from_));
const storage = gen__rhs10.0;
const _ = gen__rhs10.1
} with storage

function transfer
(const gen__P : transferParameter * storage) is
block {
const gen__rhs11 = gen__P;
const transferParameter = gen__rhs11.0;
const storage = gen__rhs11.1;
const storage
= List.fold (transferIterator, transferParameter, storage)
} with ((list [] : list (operation)), storage)

function main (const gen__P : entrypointParameter) is
block {
const gen__rhs13 = gen__P;
const parameter = gen__rhs13.0;
const storage = gen__rhs13.1
} with
case parameter of [
Transfer (transferParameter) ->
@@ -1086,7 +1086,7 @@ let%expect_test _ =
= if EQ (Tezos.sender, p.address_from)
then
block {
const new_allowances = s.allowances;
gen__env9.new_allowances := new_allowances;
skip
} with gen__env9
@@ -1127,14 +1127,14 @@ let%expect_test _ =
gen__env9.new_allowances := new_allowances;
skip
} with gen__env9;
const new_allowances = gen__env9.new_allowances;
const sender_balance : nat
= case Map.find_opt (p.address_from, s.tokens) of [
Some (value) -> value
| None -> 0n
];
const new_tokens : tokens = big_map [];
const gen__env12 = record [new_tokens = new_tokens];
const gen__env12
= if LT (sender_balance, p.value)
then
@@ -1164,7 +1164,7 @@ let%expect_test _ =
gen__env12.new_tokens := new_tokens;
skip
} with gen__env12;
const new_tokens = gen__env12.new_tokens
} with
((list [] : list (operation)),
s with
@@ -1205,7 +1205,7 @@ let%expect_test _ =
gen__env14.new_allowances := new_allowances;
skip
} with gen__env14;
const new_allowances = gen__env14.new_allowances
} with
((list [] : list (operation)),
s with
@@ -1801,22 +1801,22 @@ let%expect_test _ =
function foobar (const i : int) : int is
block {
const p : parameter = (Zero (42n));
const gen__env7 = record [i = i];
const gen__env7
= if GT (i, 0)
then
block {
const i = ADD (i, 1);
gen__env7.i := i;
const gen__env5 = record [i = i];
const gen__env5
= if GT (i, 10)
then
block {
const i = 20;
gen__env5.i := i;
failwith ("who knows");
const i = 30;
gen__env5.i := i;
skip
} with gen__env5

@@ -1824,7 +1824,7 @@ let%expect_test _ =
block {
skip
} with gen__env5;
const i = gen__env5.i;
gen__env7.i := i;
skip
} with gen__env7

@@ -1835,7 +1835,7 @@ let%expect_test _ =
| Pos (n) -> skip
]
} with gen__env7;
const i = gen__env7.i
} with
case p of [
Zero (n) -> i
@@ -1,11 +1,12 @@
module CST = Cst.Cameligo
-module LexToken = Parser_cameligo.LexToken
-module Lexer = Lexer.Make(LexToken)
+module LexToken = Lexer_cameligo.LexToken
+module Lexer = Lexer_shared.Lexer.Make (LexToken)
module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_cameligo.ParErr
module SSet = Set.Make (String)
module Pretty = Parser_cameligo.Pretty
module EvalOpt = Lexer_shared.EvalOpt

(* Mock IOs TODO: Fill them with CLI options *)
@@ -5,17 +5,24 @@ $HOME/git/OCaml-build/Makefile
../shared/LexerLib.ml
../shared/EvalOpt.ml
../shared/EvalOpt.mli
../shared/FQueue.ml
../shared/FQueue.mli
../shared/LexerLog.mli
../shared/LexerLog.ml
../shared/Markup.ml
../shared/Markup.mli
../shared/Utils.mli
../shared/Utils.ml
../shared/ParserAPI.mli
../shared/ParserAPI.ml
../shared/LexerUnit.mli
../shared/LexerUnit.ml
../shared/ParserUnit.mli
../shared/ParserUnit.ml

./Stubs/Lexer_shared.ml
./Stubs/Lexer_cameligo.ml
./Stubs/Parser_shared.ml
./Stubs/Parser_cameligo.ml
./Stubs/Cst.ml
./Stubs/Cst_cameligo.ml

$HOME/git/ligo/src/stages/1-cst/cameligo/CST.ml
$HOME/git/ligo/src/stages/1-cst/cameligo/ParserLog.mli
$HOME/git/ligo/src/stages/1-cst/cameligo/ParserLog.ml
@@ -21,13 +21,16 @@
aliased to [token].
*)

(* Dependencies *)

module Region = Simple_utils.Region
module Pos = Simple_utils.Pos
-type lexeme = string
+module Markup = Lexer_shared.Markup

(* TOKENS *)

+type lexeme = string

type t =
(* Identifiers, labels, numbers and strings *)
@@ -1,17 +1,19 @@
(* ocamlex specification for CameLIGO *)
{
(* START HEADER *)

-type lexeme = string
-
-let sprintf = Printf.sprintf
+(* Dependencies *)

module Region = Simple_utils.Region
module Pos = Simple_utils.Pos
-module SMap = Utils.String.Map
-module SSet = Utils.String.Set
+module Markup = Lexer_shared.Markup
+module SMap = Map.Make (String)
+module SSet = Set.Make (String)

(* TOKENS *)

+type lexeme = string

type t =
(* Identifiers, labels, numbers and strings *)

@@ -107,6 +109,8 @@ type t =

(* Projections *)

+let sprintf = Printf.sprintf

type token = t

let proj_token = function
@@ -1,6 +1,14 @@
(* Driver for the CameLIGO lexer *)

-module Region = Simple_utils.Region
+(* Dependencies *)
+
+module Region = Simple_utils.Region
+module EvalOpt = Lexer_shared.EvalOpt
+module Lexer = Lexer_shared.Lexer
+module LexerUnit = Lexer_shared.LexerUnit
+module LexToken = Lexer_cameligo.LexToken

(* Input/Output *)

module IO =
struct
@@ -1,4 +1,5 @@
%{
+module LexToken = Lexer_cameligo.LexToken
%}

(* Tokens (mirroring thise defined in module LexToken) *)
@@ -15,8 +15,8 @@ open CST
(* Entry points *)

%start contract interactive_expr
-%type <Cst.Cameligo.t> contract
-%type <Cst.Cameligo.expr> interactive_expr
+%type <CST.t> contract
+%type <CST.expr> interactive_expr

%%
@@ -1,7 +1,16 @@
(* Driver for the CameLIGO parser *)

-module Region = Simple_utils.Region
-module SSet = Set.Make (String)
+(* Dependencies *)
+
+module Region = Simple_utils.Region
+module EvalOpt = Lexer_shared.EvalOpt
+module LexToken = Lexer_cameligo.LexToken
+module CST = Cst.Cameligo
+module SSet = Set.Make (String)
+module ParserUnit = Parser_shared.ParserUnit
+module Pretty = Parser_cameligo.Pretty

(* Input/Output *)

module IO =
struct
@@ -55,22 +64,22 @@ module SubIO =

module Parser =
struct
-type ast = AST.t
-type expr = AST.expr
-include Parser
+type ast = CST.t
+type expr = CST.expr
+include Parser_cameligo.Parser
end

module ParserLog =
struct
-type ast = AST.t
-type expr = AST.expr
-include ParserLog
+type ast = CST.t
+type expr = CST.expr
+include Cst_cameligo.ParserLog
end

-module Lexer = Lexer.Make (LexToken)
+module Lexer = Lexer_shared.Lexer.Make (LexToken)

module Unit =
-ParserUnit.Make (Lexer)(AST)(Parser)(Parser_msg)(ParserLog)(SubIO)
+ParserUnit.Make (Lexer)(CST)(Parser)(Parser_msg)(ParserLog)(SubIO)

(* Main *)
@@ -1,10 +1,11 @@
[@@@warning "-42"]

-module CST=Cst.Cameligo
+module CST = Cst.Cameligo
open CST
module Region = Simple_utils.Region
open! Region
open! PPrint
+module Option = Simple_utils.Option

let pp_par printer {value; _} =
string "(" ^^ nest 1 (printer value.inside ^^ string ")")
src/passes/01-parsing/cameligo/README.txt (new file)
@@ -0,0 +1,2 @@
Note: The files Scoping.mli and Scoping.ml are the destination of
symbolic links from ../reasonligo.
@@ -1,7 +1,9 @@
[@@@warning "-42"]

(* Dependencies *)

module Region = Simple_utils.Region
module CST = Cst.Cameligo

type t =
Reserved_name of CST.variable

@@ -17,7 +19,7 @@ open Region

(* Useful modules *)

-module SSet = Utils.String.Set
+module SSet = Set.Make (String)

module Ord =
struct
src/passes/01-parsing/cameligo/Stubs/Cst.ml (new file)
@@ -0,0 +1 @@
module Cameligo = CST

src/passes/01-parsing/cameligo/Stubs/Cst_cameligo.ml (new file)
@@ -0,0 +1 @@
module ParserLog = ParserLog

src/passes/01-parsing/cameligo/Stubs/Lexer_cameligo.ml (new file)
@@ -0,0 +1 @@
module LexToken = LexToken

src/passes/01-parsing/cameligo/Stubs/Lexer_shared.ml (new file)
@@ -0,0 +1,6 @@
module EvalOpt = EvalOpt
module Markup = Markup
module Lexer = Lexer
module LexerUnit = LexerUnit
module LexerLog = LexerLog
module LexerLib = LexerLib

src/passes/01-parsing/cameligo/Stubs/Parser_cameligo.ml (new file)
@@ -0,0 +1,2 @@
module Pretty = Pretty
module Parser = Parser

src/passes/01-parsing/cameligo/Stubs/Parser_shared.ml (new file)
@@ -0,0 +1 @@
module ParserUnit = ParserUnit
@@ -1,3 +0,0 @@
-module Parser = Parser
-module Lexer = Lexer
-module LexToken = LexToken
@@ -1,13 +1,48 @@
;; --------------------------------------------------------------------
;; LEXING

;; Build of the lexer

(ocamllex LexToken)

;; Build of the lexer as a library

(library
(name lexer_cameligo)
(public_name ligo.lexer.cameligo)
(modules LexToken)
(libraries
;; Ligo
lexer_shared
;; Third party
hex)
(preprocess
(pps bisect_ppx --conditional)))

;; Build of a standalone lexer

(executable
(name LexerMain)
(libraries
;; Ligo
lexer_shared
lexer_cameligo
;; Third party
hex)
(modules LexerMain)
(preprocess
(pps bisect_ppx --conditional)))

;; --------------------------------------------------------------------
;; PARSING

;; Build of the parser

(menhir
(merge_into Parser)
(modules ParToken Parser)
-(flags -la 1 --table --strict --explain --external-tokens LexToken))
+(flags -la 1 --table --strict --explain
+--external-tokens Lexer_cameligo.LexToken))

;; Build of the parser as a library
@@ -15,50 +50,47 @@
(name parser_cameligo)
(public_name ligo.parser.cameligo)
(modules
-Scoping cameligo Parser LexToken ParErr Pretty)
+Scoping Parser ParErr Pretty)
(libraries
-pprint
-terminal_size
-menhirLib
-parser_shared
-str
-simple-utils
-tezos-utils
-cst
-)
+;; Ligo
+lexer_cameligo
+parser_shared
+cst
+;; Vendors
+simple-utils
+;; Third party
+pprint
+terminal_size
+menhirLib
+hex)
(preprocess
(pps bisect_ppx --conditional))
-(flags (:standard -open Parser_shared -open Simple_utils)))
+(flags (:standard -open Cst_cameligo))) ;; For CST in Parser.mli

-;; Build of the unlexer (for covering the error states of the LR
-;; automaton)
+;; Build of the unlexer (for covering the
+;; error states of the LR automaton)

(executable
(name Unlexer)
(libraries str)
-(modules Unlexer)
(preprocess
(pps bisect_ppx --conditional))
+(modules Unlexer))

;; Local build of a standalone lexer

(executable
(name LexerMain)
(libraries parser_cameligo)
(modules LexerMain)
(preprocess
-(pps bisect_ppx --conditional))
-(flags (:standard -open Parser_shared -open Parser_cameligo)))
+(pps bisect_ppx --conditional)))

;; Local build of a standalone parser

(executable
(name ParserMain)
-(libraries parser_cameligo)
+(libraries
+;; Ligo
+parser_shared
+parser_cameligo
+cst
+;; Third party
+hex)
+(modules ParserMain Parser_msg)
(preprocess
-(pps bisect_ppx --conditional))
-(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))
+(pps bisect_ppx --conditional)))

;; Build of the covering of error states in the LR automaton
@@ -85,7 +117,14 @@
(rule
(targets all.mligo)
(deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe)
-(action (run %{script_cover} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly --ext=mligo --unlexer=./Unlexer.exe --messages=Parser.msg --dir=. --concatenate Parser.mly )))
+(action (run %{script_cover}
+--lex-tokens=LexToken.mli
+--par-tokens=ParToken.mly
+--ext=mligo
+--unlexer=./Unlexer.exe
+--messages=Parser.msg
+--dir=.
+--concatenate Parser.mly)))

;; Error messages
@@ -95,60 +134,45 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action
(with-stdout-to %{targets}
-(run
-menhir
---unused-tokens
---update-errors error.messages.checked-in
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
-)
-))
-)
+(run menhir
+--unused-tokens
+--update-errors error.messages.checked-in
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly))))

(rule
(target error.messages.new)
(mode (promote (until-clean) (only *)))
(action
(with-stdout-to %{target}
-(run
-menhir
---unused-tokens
---list-errors
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
-)
-)
-)
-)
+(run menhir
+--unused-tokens
+--list-errors
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly))))

(alias
(name runtest)
(deps error.messages error.messages.new)
(action
-(run
-menhir
---unused-tokens
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
---compare-errors error.messages.new
---compare-errors error.messages
-)
-)
-)
+(run menhir
+--unused-tokens
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly
+--compare-errors error.messages.new
+--compare-errors error.messages)))

(rule
(targets ParErr.ml)

@@ -156,16 +180,12 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action
(with-stdout-to %{targets}
-(run
-menhir
---unused-tokens
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
---compile-errors error.messages.checked-in
-)
-))
-)
+(run menhir
+--unused-tokens
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly
+--compile-errors error.messages.checked-in))))
@@ -2,4 +2,4 @@ module Pascaligo = Pascaligo
module Cameligo = Cameligo
module Reasonligo = Reasonligo
module Errors = Errors
module Formatter = Formatter
@@ -1,6 +1,9 @@
(* Dependencies *)

module EvalOpt = Lexer_shared.EvalOpt
module CST = Cst.Pascaligo
-module LexToken = Parser_pascaligo.LexToken
-module Lexer = Lexer.Make(LexToken)
+module LexToken = Lexer_pascaligo.LexToken
+module Lexer = Lexer_shared.Lexer.Make (LexToken)
module Scoping = Parser_pascaligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_pascaligo.ParErr
@@ -6,14 +6,10 @@ $HOME/git/OCaml-build/Makefile
../shared/LexerLib.ml
../shared/EvalOpt.ml
../shared/EvalOpt.mli
../shared/FQueue.ml
../shared/FQueue.mli
../shared/LexerLog.mli
../shared/LexerLog.ml
../shared/Markup.ml
../shared/Markup.mli
../shared/Utils.mli
../shared/Utils.ml
../shared/ParserAPI.mli
../shared/ParserAPI.ml
../shared/LexerUnit.mli

@@ -21,3 +17,14 @@ $HOME/git/OCaml-build/Makefile
../shared/ParserUnit.mli
../shared/ParserUnit.ml
../shared/LexerLib.ml

./Stubs/Lexer_shared.ml
./Stubs/Lexer_pascaligo.ml
./Stubs/Parser_shared.ml
./Stubs/Parser_pascaligo.ml
./Stubs/Cst.ml
./Stubs/Cst_pascaligo.ml

$HOME/git/ligo/src/stages/1-cst/pascaligo/CST.ml
$HOME/git/ligo/src/stages/1-cst/pascaligo/ParserLog.mli
$HOME/git/ligo/src/stages/1-cst/pascaligo/ParserLog.ml
@@ -21,13 +21,16 @@
aliased to [token].
*)

(* Dependencies *)

module Region = Simple_utils.Region
module Pos = Simple_utils.Pos
-type lexeme = string
+module Markup = Lexer_shared.Markup

(* TOKENS *)

+type lexeme = string

type attribute = {
header : string;
string : lexeme Region.reg
@@ -1,21 +1,20 @@
-(* Lexer specification for LIGO, to be processed by [ocamllex] *)
+(* ocamlex specification for PascaLIGO *)

{
(* START HEADER *)

-(* Shorthands *)
+(* Dependencies *)

module Region = Simple_utils.Region
module Pos = Simple_utils.Pos
+module Markup = Lexer_shared.Markup
+module SMap = Map.Make (String)
+module SSet = Set.Make (String)

-type lexeme = string
-
-let sprintf = Printf.sprintf

(* TOKENS *)

+type lexeme = string

type attribute = {
header : string;
string : lexeme Region.reg

@@ -117,6 +116,8 @@ type t =

(* Projections *)

+let sprintf = Printf.sprintf

type token = t

let proj_token = function
@@ -1,6 +1,14 @@
(* Driver for the PascaLIGO lexer *)

-module Region = Simple_utils.Region
+(* Dependencies *)
+
+module Region = Simple_utils.Region
+module EvalOpt = Lexer_shared.EvalOpt
+module Lexer = Lexer_shared.Lexer
+module LexerUnit = Lexer_shared.LexerUnit
+module LexToken = Lexer_pascaligo.LexToken

(* Input/Output *)

module IO =
struct

@@ -10,8 +18,12 @@ module IO =
in read ~block ~line:"//" ".ligo"
end

+(* Instantiating the standalone lexer *)
+
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))

+(* Tracing all tokens in the source *)
+
let () =
match M.trace () with
Stdlib.Ok () -> ()
@@ -1,4 +1,5 @@
%{
+module LexToken = Lexer_pascaligo.LexToken
%}

(* Tokens (mirroring thise defined in module LexToken) *)
@@ -1,10 +1,13 @@
(* Menhir specification of the parsing of PascaLIGO *)

%{
(* START HEADER *)

[@@@warning "-42"]

+(* Dependencies *)
+
open Simple_utils.Region
module CST = Cst.Pascaligo
open CST

(* END HEADER *)

@@ -15,8 +18,8 @@ open CST
(* Entry points *)

%start contract interactive_expr
-%type <Cst.Pascaligo.t> contract
-%type <Cst.Pascaligo.expr> interactive_expr
+%type <CST.t> contract
+%type <CST.expr> interactive_expr

%%
@@ -143,7 +146,9 @@ type_decl:
terminator = $5}
in {region; value} }

-type_expr_colon: ":" type_expr { $1,$2 }
+type_annot:
+":" type_expr { $1,$2 }

type_expr:
fun_type | sum_type | record_type { $1 }
@@ -241,7 +246,7 @@ field_decl:

fun_expr:
-"function" parameters type_expr_colon? "is" expr {
+"function" parameters type_annot? "is" expr {
let stop = expr_to_region $5 in
let region = cover $1 stop
and value = {kwd_function = $1;
@@ -254,8 +259,7 @@ fun_expr:
(* Function declarations *)

open_fun_decl:
-ioption ("recursive") "function" fun_name parameters type_expr_colon? "is"
-expr {
+ioption("recursive") "function" fun_name parameters type_annot? "is" expr {
Scoping.check_reserved_name $3;
let stop = expr_to_region $7 in
let region = cover $2 stop
@@ -356,7 +360,7 @@ open_var_decl:
in {region; value} }

unqualified_decl(OP):
-var type_expr_colon? OP expr {
+var type_annot? OP expr {
Scoping.check_reserved_name $1;
let region = expr_to_region $4
in $1, $2, $3, $4, region }
@@ -594,28 +598,17 @@ while_loop:
in While {region; value} }

for_loop:
-"for" var ":=" expr "to" expr block {
-let region = cover $1 $7.region in
-let value = {kwd_for = $1;
-binder = $2;
-assign = $3;
-init = $4;
-kwd_to = $5;
-bound = $6;
-step = None;
-block = $7}
-in For (ForInt {region; value})
-}
-| "for" var ":=" expr "to" expr "step" expr block {
-let region = cover $1 $9.region in
-let value = {kwd_for = $1;
-binder = $2;
-assign = $3;
-init = $4;
-kwd_to = $5;
-bound = $6;
-step = Some ($7, $8);
-block = $9}
+"for" var ":=" expr "to" expr step_clause? block {
+Scoping.check_reserved_name $2;
+let region = cover $1 $8.region in
+let value = {kwd_for = $1;
+binder = $2;
+assign = $3;
+init = $4;
+kwd_to = $5;
+bound = $6;
+step = $7;
+block = $8}
in For (ForInt {region; value})
}
| "for" var arrow_clause? "in" collection expr block {

@@ -630,6 +623,9 @@ for_loop:
block = $7}
in For (ForCollect {region; value}) }

+step_clause:
+"step" expr { $1,$2 }

collection:
"map" { Map $1 }
| "set" { Set $1 }
@@ -650,18 +646,13 @@ expr:
| fun_expr { EFun $1 }
| block_with { EBlock $1 }

-block_with :
+block_with:
block "with" expr {
let start = $2
and stop = expr_to_region $3 in
let region = cover start stop in
-let value : CST.block_with = {
-block = $1;
-kwd_with = $2;
-expr = $3;
-}
-in {value;region}
-}
+let value = {block=$1; kwd_with=$2; expr=$3}
+in {region; value} }

cond_expr:
"if" expr "then" expr ";"? "else" expr {
@@ -1,7 +1,16 @@
(* Driver for the PascaLIGO parser *)

-module Region = Simple_utils.Region
-module SSet = Set.Make (String)
+(* Dependencies *)
+
+module Region = Simple_utils.Region
+module EvalOpt = Lexer_shared.EvalOpt
+module LexToken = Lexer_pascaligo.LexToken
+module CST = Cst.Pascaligo
+module SSet = Set.Make (String)
+module ParserUnit = Parser_shared.ParserUnit
+module Pretty = Parser_pascaligo.Pretty

(* Input/Output *)

module IO =
struct
@@ -55,22 +64,22 @@ module SubIO =

module Parser =
struct
-type ast = AST.t
-type expr = AST.expr
-include Parser
+type ast = CST.t
+type expr = CST.expr
+include Parser_pascaligo.Parser
end

module ParserLog =
struct
-type ast = AST.t
-type expr = AST.expr
-include ParserLog
+type ast = CST.t
+type expr = CST.expr
+include Cst_pascaligo.ParserLog
end

-module Lexer = Lexer.Make (LexToken)
+module Lexer = Lexer_shared.Lexer.Make (LexToken)

module Unit =
-ParserUnit.Make (Lexer)(AST)(Parser)(Parser_msg)(ParserLog)(SubIO)
+ParserUnit.Make (Lexer)(CST)(Parser)(Parser_msg)(ParserLog)(SubIO)

(* Main *)
@@ -40,12 +40,16 @@ and pp_attr_decl decl = pp_ne_injection pp_string decl

and pp_const_decl {value; _} =
let {name; const_type; init; attributes; _} = value in
-let attr = match attributes with
-None -> empty
-| Some a -> hardline ^^ pp_attr_decl a in
let start = string ("const " ^ name.value) in
-let t_expr = const_type in
+let attr = match attributes with
+None -> empty
+| Some a -> hardline ^^ pp_attr_decl a in
-group (start ^/^ pp_option (fun (_, d) -> nest 2 (string ": " ^^ pp_type_expr d)) t_expr)
+let start =
+match const_type with
+None -> start
+| Some (_, e) ->
+group (start ^/^ nest 2 (string ": " ^^ pp_type_expr e)) in
+start
^^ group (break 1 ^^ nest 2 (string "= " ^^ pp_expr init))
^^ attr
@@ -127,34 +131,46 @@ and pp_type_tuple {value; _} =

and pp_fun_expr {value; _} =
let {param; ret_type; return; _} : fun_expr = value in
let start = string "function" in
let parameters = pp_par pp_parameters param in
-let expr = pp_expr return in
+let t_annot =
+match ret_type with
+None -> empty
+| Some (_, e) ->
+group (break 1 ^^ nest 2 (string ": " ^^ pp_type_expr e)) in
group (start ^^ nest 2 (break 1 ^^ parameters))
-^^ pp_option (fun (_,d) -> group (break 1 ^^ nest 2 (string ": " ^^ pp_type_expr d))) ret_type
-^^ string " is" ^^ group (nest 4 (break 1 ^^ expr))
+^^ t_annot
+^^ string " is" ^^ group (nest 4 (break 1 ^^ pp_expr return))

and pp_fun_decl {value; _} =
-let {kwd_recursive; fun_name; param;
-ret_type; return; attributes; _} = value in
+let {kwd_recursive; fun_name; param; ret_type;
+return; attributes; _} = value in
let start =
match kwd_recursive with
None -> string "function"
| Some _ -> string "recursive" ^/^ string "function" in
-let start = start ^^ group (break 1 ^^ nest 2 (pp_ident fun_name)) in
-let parameters = pp_par pp_parameters param in
-let expr = pp_expr return in
-let body =
+let start = start ^^ group (break 1 ^^ nest 2 (pp_ident fun_name))
+and parameters = pp_par pp_parameters param
+and t_annot_is =
+match ret_type with
+None -> string " is"
+| Some (_, e) ->
+let ret_type = pp_type_expr e in
+group (nest 2 (break 1 ^^ string ": " ^^ nest 2 ret_type
+^^ string " is"))
+and body =
+let expr = pp_expr return in
match return with
EBlock _ -> group (break 1 ^^ expr)
| _ -> group (nest 2 (break 1 ^^ expr))
and attr =
match attributes with
None -> empty
| Some a -> hardline ^^ pp_attr_decl a in
prefix 2 1 start parameters
-^^ group (nest 2 (pp_option (fun (_, d) -> break 1 ^^ string ": " ^^ nest 2 (pp_type_expr d)) ret_type ^^ string " is"))
-^^ body ^^ attr
+^^ t_annot_is
+^^ body
+^^ attr

and pp_parameters p = pp_nsepseq ";" pp_param_decl p
@@ -164,13 +180,19 @@ and pp_param_decl = function

and pp_param_const {value; _} =
let {var; param_type; _} : param_const = value in
-let name = string ("const " ^ var.value)
-in prefix 2 1 name @@ pp_option (fun (_,d) -> string ": " ^^ pp_type_expr d) param_type
+let name = string ("const " ^ var.value) in
+match param_type with
+None -> name
+| Some (_, e) ->
+prefix 2 1 (name ^^ string " :") (pp_type_expr e)

and pp_param_var {value; _} =
let {var; param_type; _} : param_var = value in
-let name = string ("var " ^ var.value)
-in prefix 2 1 name @@ pp_option (fun (_,d) -> string ": " ^^ pp_type_expr d) param_type
+let name = string ("var " ^ var.value) in
+match param_type with
+None -> name
+| Some (_, e) ->
+prefix 2 1 (name ^^ string " :") (pp_type_expr e)

and pp_block {value; _} =
string "block {"
@@ -192,7 +214,12 @@ and pp_data_decl = function

and pp_var_decl {value; _} =
let {name; var_type; init; _} = value in
let start = string ("var " ^ name.value) in
-group (start ^/^ pp_option (fun (_,d) -> nest 2 (string ": " ^^ pp_type_expr d)) var_type)
+let start =
+match var_type with
+None -> start
+| Some (_, e) ->
+group (start ^/^ nest 2 (string ": " ^^ pp_type_expr e)) in
+start
^^ group (break 1 ^^ nest 2 (string ":= " ^^ pp_expr init))

and pp_instruction = function
@@ -381,11 +408,11 @@ and pp_expr = function
| EBlock e -> pp_block_with e

and pp_block_with {value; _} =
-let {block;kwd_with; expr;_} = value in
+let {block; kwd_with; expr} = value in
-let expr = value.expr in
let expr = pp_expr expr in
-group(pp_block block ^^ string " with"
-^^ group (nest 4 (break 1 ^^ expr)))
+group (pp_block block ^^ string " with"
+^^ group (nest 4 (break 1 ^^ expr)))

and pp_annot_expr {value; _} =
let expr, _, type_expr = value.inside in
@@ -1,7 +1,13 @@
(* This module exports checks on scoping, called from the parser. *)

[@@@warning "-42"]

(* Dependencies *)

module Region = Simple_utils.Region
module CST = Cst.Pascaligo

(* Errors *)

type t =
Reserved_name of CST.variable

@@ -18,7 +24,7 @@ open Region

(* Useful modules *)

-module SSet = Utils.String.Set
+module SSet = Set.Make (String)

module Ord =
struct
@@ -1,7 +1,11 @@
(* This module exports checks on scoping, called from the parser. *)

(* Dependencies *)

module Region = Simple_utils.Region
module CST = Cst.Pascaligo

(* Errors *)

type t =
Reserved_name of CST.variable
src/passes/01-parsing/pascaligo/Stubs/Cst.ml (new file)
@@ -0,0 +1 @@
module Pascaligo = CST

src/passes/01-parsing/pascaligo/Stubs/Cst_pascaligo.ml (new file)
@@ -0,0 +1 @@
module ParserLog = ParserLog

src/passes/01-parsing/pascaligo/Stubs/Lexer_pascaligo.ml (new file)
@@ -0,0 +1 @@
module LexToken = LexToken

src/passes/01-parsing/pascaligo/Stubs/Lexer_shared.ml (new file)
@@ -0,0 +1,6 @@
module EvalOpt = EvalOpt
module Markup = Markup
module Lexer = Lexer
module LexerUnit = LexerUnit
module LexerLog = LexerLog
module LexerLib = LexerLib

@@ -0,0 +1,2 @@
module Pretty = Pretty
module Parser = Parser

src/passes/01-parsing/pascaligo/Stubs/Parser_shared.ml (new file)
@@ -0,0 +1 @@
module ParserUnit = ParserUnit
@@ -1,13 +1,48 @@
;; --------------------------------------------------------------------
;; LEXING

;; Build of the lexer

(ocamllex LexToken)

;; Build of the lexer as a library

(library
(name lexer_pascaligo)
(public_name ligo.lexer.pascaligo)
(modules LexToken)
(libraries
;; Ligo
lexer_shared
;; Third party
hex)
(preprocess
(pps bisect_ppx --conditional)))

;; Build of a standalone lexer

(executable
(name LexerMain)
(libraries
;; Ligo
lexer_shared
lexer_pascaligo
;; Third party
hex)
(modules LexerMain)
(preprocess
(pps bisect_ppx --conditional)))

;; --------------------------------------------------------------------
;; PARSING

;; Build of the parser

(menhir
(merge_into Parser)
(modules ParToken Parser)
-(flags -la 1 --table --strict --explain --external-tokens LexToken))
+(flags -la 1 --table --strict --explain
+--external-tokens Lexer_pascaligo.LexToken))

;; Build of the parser as a library
@@ -15,19 +50,22 @@
(name parser_pascaligo)
(public_name ligo.parser.pascaligo)
(modules
-Scoping pascaligo Parser LexToken ParErr Pretty)
+Scoping Parser ParErr Pretty)
(libraries
-pprint
-terminal_size
-menhirLib
-parser_shared
-hex
-Preprocessor
-simple-utils
-cst)
+;; Ligo
+lexer_pascaligo
+parser_shared
+cst
+;; Vendors
+simple-utils
+;; Third party
+pprint
+terminal_size
+menhirLib
+hex)
(preprocess
(pps bisect_ppx --conditional))
-(flags (:standard -open Parser_shared -open Simple_utils)))
+(flags (:standard -open Cst_pascaligo))) ;; For CST in Parser.mli

;; Build of the unlexer (for covering the
;; error states of the LR automaton)

@@ -35,30 +73,24 @@
(executable
(name Unlexer)
(libraries str)
-(modules Unlexer)
(preprocess
(pps bisect_ppx --conditional))
+(modules Unlexer))

;; Local build of a standalone lexer

(executable
(name LexerMain)
(libraries
hex simple-utils tezos-utils parser_pascaligo)
(modules LexerMain)
(preprocess
-(pps bisect_ppx --conditional))
-(flags (:standard -open Parser_shared -open Parser_pascaligo)))
+(pps bisect_ppx --conditional)))

;; Local build of a standalone parser

(executable
(name ParserMain)
-(libraries parser_pascaligo)
-(modules ParserMain)
+(libraries
+;; Ligo
+parser_shared
+parser_pascaligo
+cst
+;; Third party
+hex)
+(modules ParserMain Parser_msg)
(preprocess
-(pps bisect_ppx --conditional))
-(flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo)))
+(pps bisect_ppx --conditional)))

;; Build of the covering of error states in the LR automaton
@@ -85,7 +117,14 @@
(rule
(targets all.ligo)
(deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe)
-(action (run %{script_cover} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly --ext=ligo --unlexer=./Unlexer.exe --messages=Parser.msg --dir=. --concatenate Parser.mly)))
+(action (run %{script_cover}
+--lex-tokens=LexToken.mli
+--par-tokens=ParToken.mly
+--ext=ligo
+--unlexer=./Unlexer.exe
+--messages=Parser.msg
+--dir=.
+--concatenate Parser.mly)))

;; Error messages
@@ -95,58 +134,45 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action
(with-stdout-to %{targets}
-(run
-menhir
---unused-tokens
---update-errors error.messages.checked-in
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
-)
-))
-)
+(run menhir
+--unused-tokens
+--update-errors error.messages.checked-in
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly))))

(rule
(target error.messages.new)
(mode (promote (until-clean) (only *)))
(action
(with-stdout-to %{target}
-(run
-menhir
---unused-tokens
---list-errors
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
-)
-)
-)
-)
+(run menhir
+--unused-tokens
+--list-errors
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly))))

(alias
(name runtest)
(deps error.messages error.messages.new)
(action
-(run
-menhir
---unused-tokens
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
---compare-errors error.messages.new
---compare-errors error.messages
-)
-)
-)
+(run menhir
+--unused-tokens
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly
+--compare-errors error.messages.new
+--compare-errors error.messages)))

(rule
(targets ParErr.ml)

@@ -154,16 +180,12 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action
(with-stdout-to %{targets}
-(run
-menhir
---unused-tokens
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
---compile-errors error.messages.checked-in
-)
-))
-)
+(run menhir
+--unused-tokens
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly
+--compile-errors error.messages.checked-in))))
(File diff suppressed because it is too large.)
@@ -1,3 +0,0 @@
-module Lexer = Lexer
-module LexToken = LexToken
-module Parser = Parser
@@ -1,12 +1,13 @@
module CST = Cst.Cameligo
-module LexToken = Parser_reasonligo.LexToken
-module Lexer = Lexer.Make (LexToken)
+module LexToken = Lexer_reasonligo.LexToken
+module Lexer = Lexer_shared.Lexer.Make (LexToken)
module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_reasonligo.ParErr
module SyntaxError = Parser_reasonligo.SyntaxError
module SSet = Set.Make (String)
module Pretty = Parser_reasonligo.Pretty
module EvalOpt = Lexer_shared.EvalOpt

(* Mock IOs TODO: Fill them with CLI options *)
|
@ -5,14 +5,10 @@ $HOME/git/OCaml-build/Makefile
|
||||
../shared/LexerLib.ml
|
||||
../shared/EvalOpt.ml
|
||||
../shared/EvalOpt.mli
|
||||
../shared/FQueue.ml
|
||||
../shared/FQueue.mli
|
||||
../shared/LexerLog.mli
|
||||
../shared/LexerLog.ml
|
||||
../shared/Markup.ml
|
||||
../shared/Markup.mli
|
||||
../shared/Utils.mli
|
||||
../shared/Utils.ml
|
||||
../shared/ParserAPI.mli
|
||||
../shared/ParserAPI.ml
|
||||
../shared/LexerUnit.mli
|
||||
@ -20,10 +16,13 @@ $HOME/git/OCaml-build/Makefile
|
||||
../shared/ParserUnit.mli
|
||||
../shared/ParserUnit.ml
|
||||
|
||||
Stubs/Parser_cameligo.ml
|
||||
./Stubs/Lexer_shared.ml
|
||||
./Stubs/Lexer_reasonligo.ml
|
||||
./Stubs/Parser_shared.ml
|
||||
./Stubs/Parser_reasonligo.ml
|
||||
./Stubs/Cst.ml
|
||||
./Stubs/Cst_cameligo.ml
|
||||
|
||||
../cameligo/AST.ml
|
||||
../cameligo/ParserLog.mli
|
||||
../cameligo/ParserLog.ml
|
||||
../cameligo/Scoping.mli
|
||||
../cameligo/Scoping.ml
|
||||
$HOME/git/ligo/src/stages/1-cst/cameligo/CST.ml
|
||||
$HOME/git/ligo/src/stages/1-cst/cameligo/ParserLog.mli
|
||||
$HOME/git/ligo/src/stages/1-cst/cameligo/ParserLog.ml
|
||||
|
@@ -21,13 +21,16 @@
aliased to [token].
*)

(* Dependencies *)

module Region = Simple_utils.Region
module Pos = Simple_utils.Pos
-type lexeme = string
+module Markup = Lexer_shared.Markup

(* TOKENS *)

+type lexeme = string

type t =
(* Identifiers, labels, numbers and strings *)
@@ -1,19 +1,19 @@
(* ocamlex specification for ReasonLIGO *)
{
(* START OF HEADER *)

-(* Shorthands *)
+(* Dependencies *)

module Region = Simple_utils.Region
module Pos = Simple_utils.Pos
-module SMap = Utils.String.Map
-module SSet = Utils.String.Set
-
-type lexeme = string
-
-let sprintf = Printf.sprintf
+module Markup = Lexer_shared.Markup
+module SMap = Map.Make (String)
+module SSet = Set.Make (String)

(* TOKENS *)

+type lexeme = string

type t =
(* Identifiers, labels, numbers and strings *)

@@ -103,6 +103,8 @@ type t =

(* Projections *)

+let sprintf = Printf.sprintf

type token = t

let proj_token = function
@@ -1,6 +1,14 @@
(* Driver for the ReasonLIGO lexer *)

-module Region = Simple_utils.Region
+(* Dependencies *)
+
+module Region = Simple_utils.Region
+module EvalOpt = Lexer_shared.EvalOpt
+module Lexer = Lexer_shared.Lexer
+module LexerUnit = Lexer_shared.LexerUnit
+module LexToken = Lexer_reasonligo.LexToken

(* Input/Output *)

module IO =
struct
@@ -1,4 +1,5 @@
%{
+module LexToken = Lexer_reasonligo.LexToken
%}

(* Tokens (mirroring those defined in module LexToken) *)
@@ -1,7 +1,16 @@
(* Driver for the ReasonLIGO parser *)

-module Region = Simple_utils.Region
-module SSet = Set.Make (String)
+(* Dependencies *)
+
+module Region = Simple_utils.Region
+module EvalOpt = Lexer_shared.EvalOpt
+module LexToken = Lexer_reasonligo.LexToken
+module CST = Cst.Cameligo
+module SSet = Set.Make (String)
+module ParserUnit = Parser_shared.ParserUnit
+module Pretty = Parser_reasonligo.Pretty

(* Input/Output *)

module IO =
struct
@@ -55,22 +64,22 @@ module SubIO =

module Parser =
struct
-type ast = AST.t
-type expr = AST.expr
-include Parser
+type ast = CST.t
+type expr = CST.expr
+include Parser_reasonligo.Parser
end

module ParserLog =
struct
-type ast = AST.t
-type expr = AST.expr
-include ParserLog
+type ast = CST.t
+type expr = CST.expr
+include Cst_cameligo.ParserLog
end

-module Lexer = Lexer.Make (LexToken)
+module Lexer = Lexer_shared.Lexer.Make (LexToken)

module Unit =
-ParserUnit.Make (Lexer)(AST)(Parser)(Parser_msg)(ParserLog)(SubIO)
+ParserUnit.Make (Lexer)(CST)(Parser)(Parser_msg)(ParserLog)(SubIO)

(* Main *)
@@ -5,6 +5,7 @@ open CST
module Region = Simple_utils.Region
open! Region
open! PPrint
+module Option = Simple_utils.Option

let rec print ast =
let app decl = group (pp_declaration decl) in
src/passes/01-parsing/reasonligo/Scoping.ml (new symbolic link)
@@ -0,0 +1 @@
../cameligo/Scoping.ml

src/passes/01-parsing/reasonligo/Scoping.mli (new symbolic link)
@@ -0,0 +1 @@
../cameligo/Scoping.mli

src/passes/01-parsing/reasonligo/Stubs/Cst.ml (new file)
@@ -0,0 +1 @@
module Cameligo = CST

src/passes/01-parsing/reasonligo/Stubs/Cst_cameligo.ml (new file)
@@ -0,0 +1 @@
module ParserLog = ParserLog

@@ -0,0 +1 @@
module LexToken = LexToken

src/passes/01-parsing/reasonligo/Stubs/Lexer_shared.ml (new file)
@@ -0,0 +1,6 @@
module EvalOpt = EvalOpt
module Markup = Markup
module Lexer = Lexer
module LexerUnit = LexerUnit
module LexerLog = LexerLog
module LexerLib = LexerLib

@@ -1 +0,0 @@
-module AST = AST

@@ -0,0 +1,2 @@
module Pretty = Pretty
module Parser = Parser

src/passes/01-parsing/reasonligo/Stubs/Parser_shared.ml (new file)
@@ -0,0 +1 @@
module ParserUnit = ParserUnit
@@ -1,6 +1,6 @@
module CST = Cst.Cameligo

type error =
| WrongFunctionArguments of CST.expr
| InvalidWild of CST.expr
@@ -1,30 +1,71 @@
;; --------------------------------------------------------------------
;; LEXING

;; Build of the lexer

(ocamllex LexToken)

;; Build of the lexer as a library

(library
(name lexer_reasonligo)
(public_name ligo.lexer.reasonligo)
(modules LexToken)
(libraries
;; Ligo
lexer_shared
;; Third party
hex)
(preprocess
(pps bisect_ppx --conditional)))

;; Build of a standalone lexer

(executable
(name LexerMain)
(libraries
;; Ligo
lexer_shared
lexer_reasonligo
;; Third party
hex)
(modules LexerMain)
(preprocess
(pps bisect_ppx --conditional)))

;; --------------------------------------------------------------------
;; PARSING

;; Build of the parser

(menhir
(merge_into Parser)
(modules ParToken Parser)
-(flags -la 1 --table --strict --explain --external-tokens LexToken))
+(flags -la 1 --table --strict --explain
+--external-tokens Lexer_reasonligo.LexToken))

;; Build of the parser as a library
(library
-(name parser_reasonligo)
-(public_name ligo.parser.reasonligo)
-(modules
-SyntaxError reasonligo LexToken ParErr Parser Pretty)
-(libraries
-menhirLib
+(name parser_reasonligo)
+(public_name ligo.parser.reasonligo)
+(modules
+SyntaxError Scoping Parser ParErr Pretty)
+(libraries
+;; Ligo
+lexer_reasonligo
+parser_shared
+parser_cameligo
-str
-simple-utils)
-(preprocess
-(pps bisect_ppx --conditional))
-(flags (:standard -open Parser_shared -open Simple_utils -open Parser_cameligo)))
+cst
+;; Vendors
+simple-utils
+;; Third party
+pprint
+terminal_size
+menhirLib
+hex)
+(preprocess
+(pps bisect_ppx --conditional))
+(flags (:standard -open Cst_cameligo))) ;; For CST in Parser.mli

;; Build of the unlexer (for covering the
;; error states of the LR automaton)
@@ -32,38 +73,31 @@
(executable
(name Unlexer)
(libraries str)
-(modules Unlexer)
(preprocess
(pps bisect_ppx --conditional))
+(modules Unlexer))

;; Local build of a standalone lexer

(executable
(name LexerMain)
(libraries parser_reasonligo)
(modules LexerMain)
(preprocess
-(pps bisect_ppx --conditional))
-(flags (:standard -open Parser_shared -open Parser_reasonligo)))
+(pps bisect_ppx --conditional)))

;; Local build of a standalone parser

(executable
(name ParserMain)
(libraries
-parser_reasonligo
-parser_cameligo)
-(modules ParserMain)
+;; Ligo
+parser_shared
+parser_reasonligo
+cst
+;; Third party
+hex)
+(modules ParserMain Parser_msg)
(preprocess
-(pps bisect_ppx --conditional))
-(flags (:standard -open Simple_utils -open Parser_cameligo -open Parser_shared -open Parser_reasonligo)))
+(pps bisect_ppx --conditional)))

;; Build of the covering of error states in the LR automaton

(rule
(targets Parser.msg)
(deps (:script_messages ../../../../vendors/ligo-utils/simple-utils/messages.sh) Parser.mly LexToken.mli ParToken.mly)
-(action (run %{script_messages} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly Parser.mly )))
+(action (run %{script_messages} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly Parser.mly)))

(rule
(targets Parser_msg.ml)
@@ -83,7 +117,16 @@
(rule
(targets all.religo)
(deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe)
-(action (run %{script_cover} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly --ext=religo --unlexer=./Unlexer.exe --messages=Parser.msg --dir=. --concatenate Parser.mly )))
+(action (run %{script_cover}
+--lex-tokens=LexToken.mli
+--par-tokens=ParToken.mly
+--ext=religo
+--unlexer=./Unlexer.exe
+--messages=Parser.msg
+--dir=.
+--concatenate Parser.mly)))

;; Error messages
(rule
(targets error.messages)

@@ -91,60 +134,45 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action
(with-stdout-to %{targets}
-(run
-menhir
---unused-tokens
---update-errors error.messages.checked-in
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
-)
-))
-)
+(run menhir
+--unused-tokens
+--update-errors error.messages.checked-in
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly))))

(rule
(target error.messages.new)
(mode (promote (until-clean) (only *)))
(action
(with-stdout-to %{target}
-(run
-menhir
---unused-tokens
---list-errors
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
-)
-)
-)
-)
+(run menhir
+--unused-tokens
+--list-errors
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly))))

(alias
(name runtest)
(deps error.messages error.messages.new)
(action
-(run
-menhir
---unused-tokens
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
---compare-errors error.messages.new
---compare-errors error.messages
-)
-)
-)
+(run menhir
+--unused-tokens
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly
+--compare-errors error.messages.new
+--compare-errors error.messages)))

(rule
(targets ParErr.ml)

@@ -152,16 +180,12 @@
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action
(with-stdout-to %{targets}
-(run
-menhir
---unused-tokens
---table
---strict
---external-tokens LexToken.mli
---base Parser.mly
-ParToken.mly
-Parser.mly
---compile-errors error.messages.checked-in
-)
-))
-)
+(run menhir
+--unused-tokens
+--table
+--strict
+--external-tokens LexToken.mli
+--base Parser.mly
+ParToken.mly
+Parser.mly
+--compile-errors error.messages.checked-in))))
@ -1,4 +0,0 @@
|
||||
module Parser = Parser
|
||||
module Lexer = Lexer
|
||||
module LexToken = LexToken
|
||||
module SyntaxError = SyntaxError
|
@ -438,13 +438,13 @@ and scan state = parse
|
||||
|
||||
(* String *)
|
||||
|
||||
| '"' { let opening, _, state = state#sync lexbuf in
|
||||
let thread = LexerLib.mk_thread opening "" in
|
||||
scan_string thread state lexbuf |> mk_string }
|
||||
| '"' { let opening, _, state = state#sync lexbuf in
|
||||
let thread = LexerLib.mk_thread opening in
|
||||
scan_string thread state lexbuf |> mk_string }
|
||||
|
||||
| "{|" { let opening, _, state = state#sync lexbuf in
|
||||
let thread = LexerLib.mk_thread opening "" in
|
||||
scan_verbatim thread state lexbuf |> mk_verbatim }
|
||||
let thread = LexerLib.mk_thread opening in
|
||||
scan_verbatim thread state lexbuf |> mk_verbatim }
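
The hunk above drops the lexeme argument of [LexerLib.mk_thread]. A minimal sketch of the calling convention before and after (names as in the diff; illustrative, not part of the commit):

  (* Old API: the opening lexeme was baked into the thread. *)
  let thread = LexerLib.mk_thread opening lexeme in ...

  (* New API: a thread starts empty; when the lexeme must be kept,
     it is pushed explicitly, as the comment scanners below do. *)
  let thread = LexerLib.mk_thread opening in
  let thread = thread#push_string lexeme in ...
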

(* Comments *)

@ -453,33 +453,28 @@ and scan state = parse
match state#block with
Some block when block#opening = lexeme ->
let opening, _, state = state#sync lexbuf in
let thread = LexerLib.mk_thread opening lexeme in
let thread, state = scan_block block thread state lexbuf in
let state = state#push_block thread
in scan state lexbuf
| Some _ | None ->
let n = String.length lexeme in
begin
LexerLib.rollback lexbuf;
assert (n > 0);
scan (scan_n_sym n state lexbuf) lexbuf
end }
let thread = LexerLib.mk_thread opening in
let thread = thread#push_string lexeme in
let thread, state = scan_block block thread state lexbuf
in scan (state#push_block thread) lexbuf
| Some _ | None -> (* Not a comment for this LIGO syntax *)
let n = String.length lexeme in
let () = LexerLib.rollback lexbuf in
scan (scan_n_sym n state lexbuf) lexbuf }

| line_comments {
let lexeme = Lexing.lexeme lexbuf in
match state#line with
Some line when line = lexeme ->
let opening, _, state = state#sync lexbuf in
let thread = LexerLib.mk_thread opening lexeme in
let thread, state = scan_line thread state lexbuf in
let state = state#push_line thread
in scan state lexbuf
| Some _ | None ->
let n = String.length lexeme in
begin
LexerLib.rollback lexbuf;
scan (scan_n_sym n state lexbuf) lexbuf
end }
let thread = LexerLib.mk_thread opening in
let thread = thread#push_string lexeme in
let thread, state = scan_line thread state lexbuf
in scan (state#push_line thread) lexbuf
| Some _ | None -> (* Not a comment for this LIGO syntax *)
let n = String.length lexeme in
let () = LexerLib.rollback lexbuf in
scan (scan_n_sym n state lexbuf) lexbuf }

| _ as c { let region, _, _ = state#sync lexbuf
in fail region (Unexpected_character c) }

@ -488,8 +483,7 @@ and scan state = parse

and scan_n_sym n state = parse
symbol { let state = mk_sym state lexbuf in
if n = 1 then state
else scan_n_sym (n-1) state lexbuf }
if n = 1 then state else scan_n_sym (n-1) state lexbuf }

(* Scanning #include flag *)

@ -552,7 +546,6 @@ and scan_block block thread state = parse
in scan_block block thread state lexbuf
else let () = LexerLib.rollback lexbuf in
let n = String.length lexeme in
let () = assert (n > 0) in
let state = scan_n_sym n state lexbuf
in scan_block block thread state lexbuf }

@ -563,7 +556,6 @@ and scan_block block thread state = parse
in thread#push_string lexeme, state
else let () = LexerLib.rollback lexbuf in
let n = String.length lexeme in
let () = assert (n > 0) in
let state = scan_n_sym n state lexbuf
in scan_block block thread state lexbuf }

@ -588,27 +580,6 @@ and scan_block block thread state = parse
let region = Region.make ~start:state#pos ~stop:pos
in fail region error }

(* Finishing a line comment *)

and scan_line thread state = parse
nl as nl { let () = Lexing.new_line lexbuf
and thread = thread#push_string nl
and state = state#set_pos (state#pos#new_line nl)
in thread, state }
| eof { thread, state }
| _ { let () = LexerLib.rollback lexbuf in
let len = thread#length in
let thread,
status = scan_utf8_inline thread state lexbuf in
let delta = thread#length - len in
let pos = state#pos#shift_one_uchar delta in
match status with
Stdlib.Ok () ->
scan_line thread (state#set_pos pos) lexbuf
| Error error ->
let region = Region.make ~start:state#pos ~stop:pos
in fail region error }

and scan_utf8 block thread state = parse
eof { let err = Unterminated_comment block#closing
in fail thread#opening err }

@ -621,6 +592,27 @@ and scan_utf8 block thread state = parse
| `Await -> scan_utf8 block thread state lexbuf
| `End -> assert false }

(* Finishing a line comment *)

and scan_line thread state = parse
nl as nl { let () = Lexing.new_line lexbuf
and thread = thread#push_string nl
and state = state#set_pos (state#pos#new_line nl)
in thread, state }
| eof { thread, state }
| _ { let () = LexerLib.rollback lexbuf in
let len = thread#length in
let thread,
status = scan_utf8_inline thread state lexbuf in
let delta = thread#length - len in
let pos = state#pos#shift_one_uchar delta in
match status with
Stdlib.Ok () ->
scan_line thread (state#set_pos pos) lexbuf
| Error error ->
let region = Region.make ~start:state#pos ~stop:pos
in fail region error }

and scan_utf8_inline thread state = parse
eof { thread, Stdlib.Ok () }
| _ as c { let thread = thread#push_char c in

@ -1,5 +1,8 @@
(* A library for writing UTF8-aware lexers *)

module Region = Simple_utils.Region
module Pos = Simple_utils.Pos
module Pos = Simple_utils.Pos
module FQueue = Simple_utils.FQueue

(* LEXER ENGINE *)

@ -69,7 +72,7 @@ type thread = <
set_opening : Region.t -> thread
>

let mk_thread region lexeme : thread =
let mk_thread region : thread =
(* The call [explode s a] is the list made by pushing the characters
in the string [s] on top of [a], in reverse order. For example,
[explode "ba" ['c';'d'] = ['a'; 'b'; 'c'; 'd']]. *)
@ -83,10 +86,10 @@ let mk_thread region lexeme : thread =
val opening = region
method opening = opening

val length = String.length lexeme
val length = 0
method length = length

val acc = explode lexeme []
val acc = []
method acc = acc

method set_opening opening = {< opening; length; acc >}
@ -100,10 +103,10 @@ let mk_thread region lexeme : thread =
acc = explode str acc >}

(* The value of [thread#to_string] is a string of length
[thread#length] containing the [thread#length] characters in
the list [thread#acc], in reverse order. For instance,
[thread#to_string = "abc"] if [thread#length = 3] and
[thread#acc = ['c';'b';'a']]. *)
[thread#length] containing the characters in the list
[thread#acc], in reverse order. For instance, [thread#to_string
= "abc"] if [thread#length = 3] and [thread#acc =
['c';'b';'a']]. *)

method to_string =
let bytes = Bytes.make length ' ' in
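
To make the thread representation above concrete, here is a small self-contained model of the accumulator: characters are kept in reverse order and written back to front (an illustration consistent with the comments above, not the commit's code):

  let explode s acc =
    (* Push the characters of [s] on top of [acc], in reverse order. *)
    let rec go i acc =
      if i >= String.length s then acc else go (i+1) (s.[i] :: acc)
    in go 0 acc

  let to_string acc len =
    let bytes = Bytes.make len ' ' in
    let rec fill i = function
        [] -> ()
      | c :: rest -> Bytes.set bytes i c; fill (i-1) rest
    in fill (len-1) acc; Bytes.to_string bytes

  let () =
    assert (explode "ba" ['c';'d'] = ['a';'b';'c';'d']);
    assert (to_string ['c';'b';'a'] 3 = "abc")
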
@ -159,15 +162,16 @@ type 'token window =
| Two of 'token * 'token

type 'token state = <
units : (Markup.t list * 'token) FQueue.t;
markup : Markup.t list;
window : 'token window;
last : Region.t;
pos : Pos.t;
decoder : Uutf.decoder;
supply : Bytes.t -> int -> int -> unit;
block : EvalOpt.block_comment option;
line : EvalOpt.line_comment option;
units : (Markup.t list * 'token) FQueue.t;
markup : Markup.t list;
comments : Markup.comment FQueue.t;
window : 'token window;
last : Region.t;
pos : Pos.t;
decoder : Uutf.decoder;
supply : Bytes.t -> int -> int -> unit;
block : EvalOpt.block_comment option;
line : EvalOpt.line_comment option;

enqueue : 'token -> 'token state;
set_units : (Markup.t list * 'token) FQueue.t -> 'token state;
@ -184,25 +188,28 @@ type 'token state = <
push_tabs : Lexing.lexbuf -> 'token state;
push_bom : Lexing.lexbuf -> 'token state;
push_markup : Markup.t -> 'token state;
push_comment : Markup.comment -> 'token state
>

let mk_state ~units ~markup ~window ~last ~pos ~decoder ~supply
let mk_state ~units ~markup ~comments ~window ~last ~pos ~decoder ~supply
?block ?line () : _ state =
object (self)
val units = units
method units = units
val markup = markup
method markup = markup
val window = window
method window = window
val last = last
method last = last
val pos = pos
method pos = pos
method decoder = decoder
method supply = supply
method block = block
method line = line
val units = units
method units = units
val markup = markup
method markup = markup
val comments = comments
method comments = comments
val window = window
method window = window
val last = last
method last = last
val pos = pos
method pos = pos
method decoder = decoder
method supply = supply
method block = block
method line = line

method enqueue token =
{< units = FQueue.enq (markup, token) units;
@ -229,6 +236,9 @@ let mk_state ~units ~markup ~window ~last ~pos ~decoder ~supply

(* Committing markup to the current logical state *)

method push_comment comment =
{< comments = FQueue.enq comment comments >}
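
A self-contained model of what the new [comments] field adds: each comment is recorded both as markup and in a dedicated queue (a sketch of the design using plain lists instead of FQueue and objects; illustrative only):

  type comment = Line of string | Block of string
  type state = { markup : string list; comments : comment list }

  let push_comment c st = { st with comments = c :: st.comments }
  let push_line text st =
    push_comment (Line text) { st with markup = text :: st.markup }

  let () =
    let st = push_line "// hi" { markup = []; comments = [] } in
    assert (st.comments = [Line "// hi"] && st.markup = ["// hi"])
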

method push_markup unit = {< markup = unit :: markup >}

method push_newline buffer =
@ -238,21 +248,23 @@ let mk_state ~units ~markup ~window ~last ~pos ~decoder ~supply
let stop = start#new_line value in
let region = Region.make ~start ~stop in
let unit = Markup.Newline Region.{region; value}
in {< pos = stop; markup = unit::markup >}
in (self#push_markup unit)#set_pos stop

method push_line thread =
let start = thread#opening#start in
let region = Region.make ~start ~stop:self#pos
and value = thread#to_string in
let unit = Markup.LineCom Region.{region; value}
in {< markup = unit::markup >}
let reg = Region.{region; value} in
let unit = Markup.LineCom reg
in (self#push_markup unit)#push_comment (Markup.Line reg)

method push_block thread =
let start = thread#opening#start in
let region = Region.make ~start ~stop:self#pos
and value = thread#to_string in
let unit = Markup.BlockCom Region.{region; value}
in {< markup = unit::markup >}
let reg = Region.{region; value} in
let unit = Markup.BlockCom reg
in (self#push_markup unit)#push_comment (Markup.Block reg)

method push_space buffer =
let region, lex, state = self#sync buffer in

@ -283,14 +295,15 @@ type input =
type 'token logger = Markup.t list -> 'token -> unit

type 'token instance = {
input : input;
read : log:('token logger) -> Lexing.lexbuf -> 'token;
buffer : Lexing.lexbuf;
get_win : unit -> 'token window;
get_pos : unit -> Pos.t;
get_last : unit -> Region.t;
get_file : unit -> file_path;
close : unit -> unit
input : input;
read : log:('token logger) -> Lexing.lexbuf -> 'token;
buffer : Lexing.lexbuf;
close : unit -> unit;
get_win : unit -> 'token window;
get_pos : unit -> Pos.t;
get_last : unit -> Region.t;
get_file : unit -> file_path;
get_comments : unit -> Markup.comment FQueue.t
}

type open_err = File_opening of string

@ -329,15 +342,18 @@ let open_token_stream ?line ?block ~scan
~window:Nil
~pos
~markup:[]
~comments:FQueue.empty
~decoder
~supply
?block
?line
()) in
let get_pos () = !state#pos
and get_last () = !state#last
and get_win () = !state#window
and get_file () = file_path in

let get_pos () = !state#pos
and get_last () = !state#last
and get_win () = !state#window
and get_comments () = !state#comments
and get_file () = file_path in

let patch_buffer (start, stop) buffer =
let open Lexing in
@ -368,8 +384,8 @@ let open_token_stream ?line ?block ~scan
| Some (units, (left_mark, token)) ->
log left_mark token;
state := ((!state#set_units units)
#set_last (token_to_region token))
#slide_token token;
#set_last (token_to_region token))
#slide_token token;
style token (next_token scan) buffer;
patch_buffer (token_to_region token)#byte_pos buffer;
token in
@ -382,6 +398,7 @@ let open_token_stream ?line ?block ~scan
| _ -> () in
let instance = {
read = read scan ~token_to_region ~style;
input; buffer; get_win; get_pos; get_last; get_file; close}
input; buffer; close;
get_win; get_pos; get_last; get_file; get_comments}
in Ok instance
| Error _ as e -> e
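
A client of the new [get_comments] accessor might drain the queue as sketched below, assuming (as in Simple_utils.FQueue) a function FQueue.deq : 'a FQueue.t -> ('a FQueue.t * 'a) option; this is an illustration, not part of the commit:

  let rec drain print q =
    match FQueue.deq q with
      None -> ()
    | Some (q, Markup.Line reg)
    | Some (q, Markup.Block reg) ->
        print reg.Region.value; drain print q

  (* e.g. drain print_endline (instance.get_comments ()) *)
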

@ -1,7 +1,8 @@
(* A library for writing UTF8-aware lexers *)

module Region = Simple_utils.Region
module Pos = Simple_utils.Pos
module Pos = Simple_utils.Pos
module FQueue = Simple_utils.FQueue

(* The function [rollback] resets the lexing buffer to the state it
was when it matched the last regular expression. This function is
@ -39,7 +40,7 @@ type thread = <
set_opening : Region.t -> thread
>

val mk_thread : Region.t -> lexeme -> thread
val mk_thread : Region.t -> thread

(* STATE *)

@ -108,15 +109,16 @@ type 'token window =
| Two of 'token * 'token

type 'token state = <
units : (Markup.t list * 'token) FQueue.t;
markup : Markup.t list;
window : 'token window;
last : Region.t;
pos : Pos.t;
decoder : Uutf.decoder;
supply : Bytes.t -> int -> int -> unit;
block : EvalOpt.block_comment option;
line : EvalOpt.line_comment option;
units : (Markup.t list * 'token) FQueue.t;
markup : Markup.t list;
comments : Markup.comment FQueue.t;
window : 'token window;
last : Region.t;
pos : Pos.t;
decoder : Uutf.decoder;
supply : Bytes.t -> int -> int -> unit;
block : EvalOpt.block_comment option;
line : EvalOpt.line_comment option;

enqueue : 'token -> 'token state;
set_units : (Markup.t list * 'token) FQueue.t -> 'token state;
@ -133,6 +135,7 @@ type 'token state = <
push_tabs : Lexing.lexbuf -> 'token state;
push_bom : Lexing.lexbuf -> 'token state;
push_markup : Markup.t -> 'token state;
push_comment : Markup.comment -> 'token state
>

(* LEXER INSTANCE *)

@ -178,11 +181,12 @@ type 'token instance = {
input : input;
read : log:('token logger) -> Lexing.lexbuf -> 'token;
buffer : Lexing.lexbuf;
close : unit -> unit;
get_win : unit -> 'token window;
get_pos : unit -> Pos.t;
get_last : unit -> Region.t;
get_file : unit -> file_path;
close : unit -> unit
get_comments : unit -> Markup.comment FQueue.t
}

type open_err = File_opening of string

@ -42,3 +42,9 @@ let to_string markup ?(offsets=true) mode =
region, sprintf "BOM \"%s\"" (String.escaped value) in
let reg_str = region#compact ~offsets mode
in sprintf "%s: %s" reg_str val_str

(* Comments *)

type comment =
Line of lexeme Region.reg
| Block of lexeme Region.reg
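
Given the new [comment] type, downstream code can treat line and block comments uniformly; a hypothetical helper, not part of this commit:

  let comment_to_lexeme = function
    Line reg | Block reg -> reg.Region.value
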

@ -1,11 +1,12 @@
(** This module defines the sorts of markup recognised by the LIGO
(* This module defines the sorts of markup recognised by the LIGO
lexer *)

module Region = Simple_utils.Region

(** A lexeme is piece of concrete syntax belonging to a token. In
(* A lexeme is piece of concrete syntax belonging to a token. In
algebraic terms, a token is also a piece of abstract lexical
syntax. Lexical units emcompass both markup and lexemes. *)

type lexeme = string

type t =
@ -18,7 +19,7 @@ type t =

type markup = t

(** Pretty-printing of markup
(* Pretty-printing of markup

The difference between [to_lexeme] and [to_string] is that the
former builds the corresponding concrete syntax (the lexeme),
@ -31,3 +32,9 @@ type markup = t

val to_lexeme : t -> lexeme
val to_string : t -> ?offsets:bool -> [`Byte | `Point] -> string

(* Comments *)

type comment =
Line of lexeme Region.reg
| Block of lexeme Region.reg

@ -1,6 +1,14 @@
(* Generic parser for LIGO *)

module Region = Simple_utils.Region
(* Dependencies *)

module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module Lexer = Lexer_shared.Lexer
module LexerLib = Lexer_shared.LexerLib
module LexerLog = Lexer_shared.LexerLog

(* Input/Output *)

type options = <
offsets : bool;

@ -1,6 +1,13 @@
(* Generic parser API for LIGO *)

module Region = Simple_utils.Region
(* Dependencies *)

module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module Lexer = Lexer_shared.Lexer
module LexerLib = Lexer_shared.LexerLib

(* Input/Output *)

type options = <
offsets : bool;

@ -1,8 +1,16 @@
(* Functor to build a LIGO parser *)

module Region = Simple_utils.Region
module Preproc = Preprocessor.Preproc
module SSet = Set.Make (String)
(* Dependencies *)

module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module Lexer = Lexer_shared.Lexer
module LexerLib = Lexer_shared.LexerLib
module LexerLog = Lexer_shared.LexerLog
module Preproc = Preprocessor.Preproc
module SSet = Set.Make (String)

(* A subtype of [EvalOpt.options] *)

module type SubIO =
sig

@ -1,8 +1,12 @@
(* Functor to build a standalone LIGO parser *)

module Region = Simple_utils.Region
(* Dependencies *)

module SSet : Set.S with type elt = string and type t = Set.Make(String).t
module Region = Simple_utils.Region
module EvalOpt = Lexer_shared.EvalOpt
module Lexer = Lexer_shared.Lexer
module SSet : Set.S with type elt = string
and type t = Set.Make(String).t

(* A subtype of [EvalOpt.options] *)

@ -1,29 +1,51 @@
;; Build of the lexer in common to all Ligo's dialects

(ocamllex Lexer)

;; Build of the lexer as a library

(library
(name lexer_shared)
(public_name ligo.lexer.shared)
(libraries
;; Ligo
simple-utils
;; Vendors
Preprocessor
;; Third party
uutf
getopt
zarith)
(modules
LexerLib
LexerUnit
Lexer
LexerLog
Markup
EvalOpt
Version)
(preprocess
(pps bisect_ppx --conditional)))

;; Build of the parser as a library

(library
(name parser_shared)
(public_name ligo.parser.shared)
(libraries
menhirLib
;; Ligo
lexer_shared
simple-utils
uutf
getopt
zarith
Preprocessor)
(preprocess
(pps bisect_ppx --conditional))
;; Third party
menhirLib)
(modules
LexerLib
LexerUnit
ParserUnit
ParserAPI
Lexer
LexerLog
Markup
Utils
FQueue
EvalOpt
Version))
ParserAPI)
(preprocess
(pps bisect_ppx --conditional)))
;; (flags (:standard -open Lexer_shared)))
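
With the lexer split into its own [lexer_shared] library, parser-side modules reach it through qualified aliases, as the ParserAPI and ParserUnit hunks above show; the pattern is simply:

  module EvalOpt  = Lexer_shared.EvalOpt
  module Lexer    = Lexer_shared.Lexer
  module LexerLib = Lexer_shared.LexerLib
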

;; Build of the version source (for the user, as a CLI option)

(rule
(targets Version.ml)

@ -13,14 +13,14 @@ let wrap : type a. a -> a CST.reg = fun value -> {value;region=rg}
let list_to_sepseq lst =
match lst with
[] -> None
| hd :: lst ->
| hd :: lst ->
let aux e = (rg, e) in
Some (hd, List.map aux lst)
let list_to_nsepseq lst =
let list_to_nsepseq lst =
match list_to_sepseq lst with
Some s -> ok @@ s
| None -> failwith "List is empty"
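
For orientation, the shape these helpers build is the [nsepseq] of Simple_utils.Utils: a non-empty sequence with separators interleaved; [rg] in the diff plays the role of the separator. A standalone analogue (illustrative only):

  type ('a, 'sep) nsepseq = 'a * ('sep * 'a) list

  let to_sepseq sep = function
      [] -> None
    | hd :: tl -> Some (hd, List.map (fun e -> (sep, e)) tl)

  let () =
    assert (to_sepseq ',' [1; 2; 3] = Some (1, [(',', 2); (',', 3)]))
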

let nelist_to_npseq (hd, lst) = (hd, List.map (fun e -> (rg, e)) lst)
let npseq_cons hd lst = hd,(rg, fst lst)::(snd lst)

@ -39,7 +39,7 @@ let decompile_variable : type a. a Var.t -> CST.variable = fun var ->
if String.contains var '#' then
let var = String.split_on_char '#' var in
wrap @@ "gen__" ^ (String.concat "" var)
else
else
if String.length var > 4 && String.equal "gen__" @@ String.sub var 0 5 then
wrap @@ "user__" ^ var
else
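
The renaming rule in this hunk can be summarised by a small standalone function (a sketch of the behaviour only; the commit's code additionally wraps the result in a CST region):

  let rename var =
    if String.contains var '#' then
      "gen__" ^ String.concat "" (String.split_on_char '#' var)
    else if String.length var > 4
            && String.equal "gen__" (String.sub var 0 5) then
      "user__" ^ var
    else var

  let () =
    assert (rename "x#123" = "gen__x123");
    assert (rename "gen__y" = "user__gen__y");
    assert (rename "plain" = "plain")
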
|
||||
@ -51,23 +51,23 @@ let rec decompile_type_expr : AST.type_expression -> _ result = fun te ->
|
||||
T_sum sum ->
|
||||
let sum = AST.CMap.to_kv_list sum in
|
||||
let aux (AST.Constructor c, AST.{ctor_type;_}) =
|
||||
let constr = wrap c in
|
||||
let constr = wrap c in
|
||||
let%bind arg = decompile_type_expr ctor_type in
|
||||
let arg = Some (rg, arg) in
|
||||
let variant : CST.variant = {constr;arg} in
|
||||
ok @@ wrap variant
|
||||
ok @@ wrap variant
|
||||
in
|
||||
let%bind sum = bind_map_list aux sum in
|
||||
let%bind sum = list_to_nsepseq sum in
|
||||
return @@ CST.TSum (wrap sum)
|
||||
| T_record record ->
|
||||
| T_record record ->
|
||||
let record = AST.LMap.to_kv_list record in
|
||||
let aux (AST.Label c, AST.{field_type;_}) =
|
||||
let field_name = wrap c in
|
||||
let field_name = wrap c in
|
||||
let colon = rg in
|
||||
let%bind field_type = decompile_type_expr field_type in
|
||||
let variant : CST.field_decl = {field_name;colon;field_type} in
|
||||
ok @@ wrap variant
|
||||
ok @@ wrap variant
|
||||
in
|
||||
let%bind record = bind_map_list aux record in
|
||||
let%bind record = list_to_nsepseq record in
|
||||
@ -97,27 +97,27 @@ let rec decompile_type_expr : AST.type_expression -> _ result = fun te ->
|
||||
failwith "let's work on it later"
|
||||
|
||||
let get_e_variable : AST.expression -> _ result = fun expr ->
|
||||
match expr.expression_content with
|
||||
match expr.expression_content with
|
||||
E_variable var -> ok @@ var.wrap_content
|
||||
| _ -> failwith @@
|
||||
| _ -> failwith @@
|
||||
Format.asprintf "%a should be a variable expression"
|
||||
AST.PP.expression expr
|
||||
|
||||
let get_e_tuple : AST.expression -> _ result = fun expr ->
|
||||
match expr.expression_content with
|
||||
match expr.expression_content with
|
||||
E_tuple tuple -> ok @@ tuple
|
||||
| E_variable _
|
||||
| E_literal _
|
||||
| E_constant _
|
||||
| E_lambda _ -> ok @@ [expr]
|
||||
| _ -> failwith @@
|
||||
| _ -> failwith @@
|
||||
Format.asprintf "%a should be a tuple expression"
|
||||
AST.PP.expression expr
|
||||
|
||||
let pattern_type var ty_opt =
|
||||
let var = CST.PVar (decompile_variable var) in
|
||||
match ty_opt with
|
||||
Some s ->
|
||||
Some s ->
|
||||
let%bind type_expr = decompile_type_expr s in
|
||||
ok @@ CST.PTyped (wrap @@ CST.{pattern=var;colon=rg;type_expr})
|
||||
| None -> ok @@ var
|
||||
@ -126,7 +126,7 @@ let rec decompile_expression : AST.expression -> _ result = fun expr ->
|
||||
let return_expr expr = ok @@ expr in
|
||||
let return_expr_with_par expr = return_expr @@ CST.EPar (wrap @@ par @@ expr) in
|
||||
match expr.expression_content with
|
||||
E_variable name ->
|
||||
E_variable name ->
|
||||
let var = decompile_variable name.wrap_content in
|
||||
return_expr @@ CST.EVar (var)
|
||||
| E_constant {cons_name; arguments} ->
|
||||
@ -145,7 +145,7 @@ let rec decompile_expression : AST.expression -> _ result = fun expr ->
|
||||
Literal_unit -> return_expr @@ CST.EUnit (wrap (rg,rg))
|
||||
| Literal_int i -> return_expr @@ CST.EArith (Int (wrap ("",i)))
|
||||
| Literal_nat n -> return_expr @@ CST.EArith (Nat (wrap ("",n)))
|
||||
| Literal_timestamp time ->
|
||||
| Literal_timestamp time ->
|
||||
let time = Tezos_utils.Time.Protocol.to_notation @@
|
||||
Tezos_utils.Time.Protocol.of_seconds @@ Z.to_int64 time in
|
||||
(* TODO combinators for CSTs. *)
|
||||
@ -175,15 +175,15 @@ let rec decompile_expression : AST.expression -> _ result = fun expr ->
|
||||
let kh = CST.EString (String (wrap kh)) in
|
||||
let%bind ty = decompile_type_expr @@ AST.t_key_hash () in
|
||||
return_expr @@ CST.EAnnot (wrap @@ par (kh,rg,ty))
|
||||
| Literal_chain_id _
|
||||
| Literal_chain_id _
|
||||
| Literal_void
|
||||
| Literal_operation _ ->
|
||||
failwith "chain_id, void, operation are not created currently ?"
|
||||
)
|
||||
| E_application {lamb;args} ->
|
||||
let%bind lamb = decompile_expression lamb in
|
||||
let%bind args = map List.Ne.of_list @@
|
||||
bind (bind_map_list decompile_expression) @@
|
||||
let%bind args = map List.Ne.of_list @@
|
||||
bind (bind_map_list decompile_expression) @@
|
||||
get_e_tuple args
|
||||
in
|
||||
return_expr @@ CST.ECall (wrap (lamb,args))
|
||||
@ -220,7 +220,7 @@ let rec decompile_expression : AST.expression -> _ result = fun expr ->
|
||||
return_expr @@ CST.ECase (wrap cases)
|
||||
| E_record record ->
|
||||
let record = AST.LMap.to_kv_list record in
|
||||
let aux (AST.Label str, expr) =
|
||||
let aux (AST.Label str, expr) =
|
||||
let field_name = wrap str in
|
||||
let%bind field_expr = decompile_expression expr in
|
||||
let field : CST.field_assign = {field_name;assignment=rg;field_expr} in
|
||||
@ -256,10 +256,10 @@ let rec decompile_expression : AST.expression -> _ result = fun expr ->
|
||||
| E_update {record={expression_content=E_update _;_} as record;path;update} ->
|
||||
let%bind record = decompile_expression record in
|
||||
let%bind (record,updates) = match record with
|
||||
CST.EUpdate {value;_} -> ok @@ (value.record,value.updates)
|
||||
| _ -> failwith @@ Format.asprintf "Inpossible case %a" AST.PP.expression expr
|
||||
CST.EUpdate {value;_} -> ok @@ (value.record,value.updates)
|
||||
| _ -> failwith @@ Format.asprintf "Inpossible case %a" AST.PP.expression expr
|
||||
in
|
||||
let%bind var,path = match path with
|
||||
let%bind var,path = match path with
|
||||
Access_record var::path -> ok @@ (var,path)
|
||||
| _ -> failwith "Impossible case %a"
|
||||
in
|
||||
@ -275,7 +275,7 @@ let rec decompile_expression : AST.expression -> _ result = fun expr ->
|
||||
let%bind field_expr = decompile_expression update in
|
||||
let (struct_name,field_path) = List.Ne.of_list path in
|
||||
(match field_path with
|
||||
[] ->
|
||||
[] ->
|
||||
(match struct_name with
|
||||
Access_record name ->
|
||||
let record : CST.path = Name record in
|
||||
@ -299,7 +299,7 @@ let rec decompile_expression : AST.expression -> _ result = fun expr ->
|
||||
| _ ->
|
||||
let%bind struct_name = match struct_name with
|
||||
Access_record name -> ok @@ wrap name
|
||||
| Access_tuple i -> ok @@ wrap @@ Z.to_string i
|
||||
| Access_tuple i -> ok @@ wrap @@ Z.to_string i
|
||||
| Access_map _ -> failwith @@ Format.asprintf "invalid map update %a" AST.PP.expression expr
|
||||
in
|
||||
(match List.rev field_path with
|
||||
@ -347,7 +347,7 @@ let rec decompile_expression : AST.expression -> _ result = fun expr ->
|
||||
let%bind map = bind_map_list (bind_map_pair decompile_expression) map in
|
||||
let aux (k,v) = CST.ETuple (wrap (k,[(rg,v)])) in
|
||||
let map = List.map aux map in
|
||||
(match map with
|
||||
(match map with
|
||||
[] -> return_expr @@ CST.EVar (wrap "Big_map.empty")
|
||||
| _ ->
|
||||
let var = CST.EVar (wrap "Map.literal") in
|
||||
@ -357,7 +357,7 @@ let rec decompile_expression : AST.expression -> _ result = fun expr ->
|
||||
let%bind big_map = bind_map_list (bind_map_pair decompile_expression) big_map in
|
||||
let aux (k,v) = CST.ETuple (wrap (k,[(rg,v)])) in
|
||||
let big_map = List.map aux big_map in
|
||||
(match big_map with
|
||||
(match big_map with
|
||||
[] -> return_expr @@ CST.EVar (wrap "Big_map.empty")
|
||||
| _ ->
|
||||
let var = CST.EVar (wrap "Big_map.literal") in
|
||||
@ -409,28 +409,28 @@ and decompile_attributes = function
|
||||
true -> [wrap "inline"]
|
||||
| false -> []
|
||||
|
||||
and decompile_matching_cases : AST.matching_expr -> ((CST.expr CST.case_clause Region.reg, Region.t) Parser_shared.Utils.nsepseq Region.reg,_) result =
|
||||
and decompile_matching_cases : AST.matching_expr -> ((CST.expr CST.case_clause Region.reg, Region.t) Simple_utils.Utils.nsepseq Region.reg,_) result =
|
||||
fun m ->
|
||||
let%bind cases = match m with
|
||||
Match_variable (var, ty_opt, expr) ->
|
||||
let%bind pattern = pattern_type var.wrap_content ty_opt in
|
||||
let%bind pattern = pattern_type var.wrap_content ty_opt in
|
||||
let%bind rhs = decompile_expression expr in
|
||||
let case : _ CST.case_clause = {pattern; arrow=rg; rhs}in
|
||||
ok @@ [wrap case]
|
||||
| Match_tuple (lst, ty_opt, expr) ->
|
||||
let%bind tuple = match ty_opt with
|
||||
let%bind tuple = match ty_opt with
|
||||
Some ty_lst ->
|
||||
let aux (var, ty) =
|
||||
let aux (var, ty) =
|
||||
let pattern = CST.PVar (decompile_variable var) in
|
||||
let%bind type_expr = decompile_type_expr ty in
|
||||
ok @@ CST.PTyped (wrap @@ CST.{pattern;colon=rg;type_expr})
|
||||
ok @@ CST.PTyped (wrap @@ CST.{pattern;colon=rg;type_expr})
|
||||
in
|
||||
bind list_to_nsepseq @@ bind_map_list aux @@ List.combine (List.map (fun (e:AST.expression_variable) -> e.wrap_content) lst) ty_lst
|
||||
| None ->
|
||||
let aux (var:AST.expression_variable) = CST.PVar (decompile_variable var.wrap_content) in
|
||||
list_to_nsepseq @@ List.map aux lst
|
||||
list_to_nsepseq @@ List.map aux lst
|
||||
in
|
||||
let pattern : CST.pattern = PTuple (wrap @@ tuple) in
|
||||
let pattern : CST.pattern = PTuple (wrap @@ tuple) in
|
||||
let%bind rhs = decompile_expression expr in
|
||||
let case : _ CST.case_clause = {pattern; arrow=rg; rhs}in
|
||||
ok @@ [wrap case]
|
||||
@ -453,7 +453,7 @@ fun m ->
|
||||
let nil_case : _ CST.case_clause = {pattern=PList (PListComp (wrap @@ inject brackets None));arrow=rg; rhs} in
|
||||
ok @@ [wrap cons_case; wrap nil_case]
|
||||
| Match_variant lst ->
|
||||
let aux ((c,(v:AST.expression_variable)),e) =
|
||||
let aux ((c,(v:AST.expression_variable)),e) =
|
||||
let AST.Constructor c = c in
|
||||
let constr = wrap @@ c in
|
||||
let var : CST.pattern = PVar (decompile_variable v.wrap_content) in
|
||||
|
@ -584,7 +584,6 @@ and compile_instruction : ?next: AST.expression -> CST.instruction -> _ result
|
||||
let%bind (_, var, path) = compile_path mlu.path in
|
||||
let%bind index = compile_expression @@ mlu.index.value.inside in
|
||||
ok @@ (var, path @ [Access_map index])
|
||||
|
||||
in
|
||||
match instruction with
|
||||
Cond c ->
|
||||
@ -747,8 +746,8 @@ and compile_block : ?next:AST.expression -> CST.block CST.reg -> _ result = fun
|
||||
let (block', _loc) = r_split block in
|
||||
let statements = npseq_to_list block'.statements in
|
||||
let aux (next,attr) statement =
|
||||
let%bind (statement, attr) = compile_statement ?next attr statement in
|
||||
return (statement,attr)
|
||||
let%bind (statement, attr) = compile_statement ?next attr statement
|
||||
in return (statement,attr)
|
||||
in
|
||||
let%bind (block', _) = bind_fold_right_list aux (next,None) statements in
|
||||
match block' with
|
||||
|
@ -13,10 +13,10 @@ let wrap : type a. a -> a CST.reg = fun value -> {value;region=rg}
|
||||
let list_to_sepseq lst =
|
||||
match lst with
|
||||
[] -> None
|
||||
| hd :: lst ->
|
||||
| hd :: lst ->
|
||||
let aux e = (rg, e) in
|
||||
Some (hd, List.map aux lst)
|
||||
let list_to_nsepseq lst =
|
||||
let list_to_nsepseq lst =
|
||||
match list_to_sepseq lst with
|
||||
Some s -> ok @@ s
|
||||
| None -> failwith "List is not a non_empty list"
|
||||
@ -40,7 +40,7 @@ let decompile_variable : type a. a Var.t -> CST.variable = fun var ->
|
||||
if String.contains var '#' then
|
||||
let var = String.split_on_char '#' var in
|
||||
wrap @@ "gen__" ^ (String.concat "" var)
|
||||
else
|
||||
else
|
||||
if String.length var > 4 && String.equal "gen__" @@ String.sub var 0 5 then
|
||||
wrap @@ "user__" ^ var
|
||||
else
|
||||
@ -52,23 +52,23 @@ let rec decompile_type_expr : AST.type_expression -> _ result = fun te ->
|
||||
T_sum sum ->
|
||||
let sum = AST.CMap.to_kv_list sum in
|
||||
let aux (AST.Constructor c, AST.{ctor_type;_}) =
|
||||
let constr = wrap c in
|
||||
let constr = wrap c in
|
||||
let%bind arg = decompile_type_expr ctor_type in
|
||||
let arg = Some (rg, arg) in
|
||||
let variant : CST.variant = {constr;arg} in
|
||||
ok @@ wrap variant
|
||||
ok @@ wrap variant
|
||||
in
|
||||
let%bind sum = bind_map_list aux sum in
|
||||
let%bind sum = list_to_nsepseq sum in
|
||||
return @@ CST.TSum (wrap sum)
|
||||
| T_record record ->
|
||||
| T_record record ->
|
||||
let record = AST.LMap.to_kv_list record in
|
||||
let aux (AST.Label c, AST.{field_type;_}) =
|
||||
let field_name = wrap c in
|
||||
let field_name = wrap c in
|
||||
let colon = rg in
|
||||
let%bind field_type = decompile_type_expr field_type in
|
||||
let variant : CST.field_decl = {field_name;colon;field_type} in
|
||||
ok @@ wrap variant
|
||||
ok @@ wrap variant
|
||||
in
|
||||
let%bind record = bind_map_list aux record in
|
||||
let%bind record = list_to_nsepseq record in
|
||||
@ -98,30 +98,30 @@ let rec decompile_type_expr : AST.type_expression -> _ result = fun te ->
|
||||
failwith "let's work on it later"
|
||||
|
||||
let get_e_variable : AST.expression -> _ result = fun expr ->
|
||||
match expr.expression_content with
|
||||
match expr.expression_content with
|
||||
E_variable var -> ok @@ var.wrap_content
|
||||
| _ -> failwith @@
|
||||
| _ -> failwith @@
|
||||
Format.asprintf "%a should be a variable expression"
|
||||
AST.PP.expression expr
|
||||
|
||||
|
||||
let rec get_e_accessor : AST.expression -> _ result = fun expr ->
|
||||
match expr.expression_content with
|
||||
match expr.expression_content with
|
||||
E_variable var -> ok @@ (var, [])
|
||||
| E_accessor {record;path} ->
|
||||
let%bind (var, lst) = get_e_accessor record in
|
||||
ok @@ (var, lst @ path)
|
||||
| _ -> failwith @@
|
||||
| _ -> failwith @@
|
||||
Format.asprintf "%a should be a variable expression"
|
||||
AST.PP.expression expr
|
||||
|
||||
let get_e_tuple : AST.expression -> _ result = fun expr ->
|
||||
match expr.expression_content with
|
||||
match expr.expression_content with
|
||||
E_tuple tuple -> ok @@ tuple
|
||||
| E_variable _
|
||||
| E_literal _
|
||||
| E_constant _
|
||||
| E_lambda _ -> ok @@ [expr]
|
||||
| _ -> failwith @@
|
||||
| _ -> failwith @@
|
||||
Format.asprintf "%a should be a tuple expression"
|
||||
AST.PP.expression expr
|
||||
type eos =
|
||||
@ -138,7 +138,7 @@ let statements_of_expression : CST.expr -> CST.statement List.Ne.t option = fun
|
||||
let rec decompile_expression : AST.expression -> _ result = fun e ->
|
||||
let%bind (block,expr) = decompile_to_block e in
|
||||
match expr with
|
||||
Some expr ->
|
||||
Some expr ->
|
||||
( match block with
|
||||
Some block ->
|
||||
let block = wrap @@ block in
|
||||
@ -146,7 +146,7 @@ let rec decompile_expression : AST.expression -> _ result = fun e ->
|
||||
| None -> ok @@ expr
|
||||
)
|
||||
| None ->
|
||||
failwith @@ Format.asprintf
|
||||
failwith @@ Format.asprintf
|
||||
"An expression was expected, but this was decompile to statements. \n
|
||||
Expr : %a
|
||||
Loc : %a"
|
||||
@ -157,8 +157,8 @@ and decompile_statements : AST.expression -> _ result = fun expr ->
|
||||
let%bind (stat,_) = decompile_eos Statements expr in
|
||||
match stat with
|
||||
Some stat -> ok @@ stat
|
||||
| None ->
|
||||
failwith @@ Format.asprintf
|
||||
| None ->
|
||||
failwith @@ Format.asprintf
|
||||
"Statements was expected, but this was decompile to expression. \n
|
||||
Expr : %a
|
||||
Loc : %a"
|
||||
@ -185,7 +185,7 @@ and decompile_eos : eos -> AST.expression -> ((CST.statement List.Ne.t option)*
|
||||
let return_stat_ez stat = return_stat @@ (stat, []) in
|
||||
let return_inst inst = return_stat_ez @@ CST.Instr inst in
|
||||
match expr.expression_content with
|
||||
E_variable name ->
|
||||
E_variable name ->
|
||||
let var = decompile_variable name.wrap_content in
|
||||
return_expr @@ CST.EVar (var)
|
||||
| E_constant {cons_name; arguments} ->
|
||||
@ -195,7 +195,7 @@ and decompile_eos : eos -> AST.expression -> ((CST.statement List.Ne.t option)*
|
||||
| _ ->
|
||||
let%bind arguments = decompile_to_tuple_expr arguments in
|
||||
let const : CST.fun_call = wrap (expr, arguments) in
|
||||
(match output with
|
||||
(match output with
|
||||
Expression -> return_expr (CST.ECall const)
|
||||
| Statements -> return_inst (CST.ProcCall const)
|
||||
)
|
||||
@ -205,7 +205,7 @@ and decompile_eos : eos -> AST.expression -> ((CST.statement List.Ne.t option)*
|
||||
Literal_unit -> return_expr @@ CST.EUnit rg
|
||||
| Literal_int i -> return_expr @@ CST.EArith (Int (wrap ("",i)))
|
||||
| Literal_nat n -> return_expr @@ CST.EArith (Nat (wrap ("",n)))
|
||||
| Literal_timestamp time ->
|
||||
| Literal_timestamp time ->
|
||||
let time = Tezos_utils.Time.Protocol.to_notation @@
|
||||
Tezos_utils.Time.Protocol.of_seconds @@ Z.to_int64 time in
|
||||
(* TODO combinators for CSTs. *)
|
||||
@ -235,7 +235,7 @@ and decompile_eos : eos -> AST.expression -> ((CST.statement List.Ne.t option)*
|
||||
let kh = CST.EString (String (wrap kh)) in
|
||||
let%bind ty = decompile_type_expr @@ AST.t_key_hash () in
|
||||
return_expr @@ CST.EAnnot (wrap @@ par (kh,rg,ty))
|
||||
| Literal_chain_id _
|
||||
| Literal_chain_id _
|
||||
| Literal_void
|
||||
| Literal_operation _ ->
|
||||
failwith "chain_id, void, operation are not created currently ?"
|
||||
@ -244,7 +244,7 @@ and decompile_eos : eos -> AST.expression -> ((CST.statement List.Ne.t option)*
|
||||
let%bind lamb = decompile_expression lamb in
|
||||
let%bind args = bind decompile_to_tuple_expr @@ get_e_tuple args in
|
||||
(match output with
|
||||
Expression ->
|
||||
Expression ->
|
||||
return_expr @@ CST.ECall (wrap (lamb,args))
|
||||
| Statements ->
|
||||
return_inst @@ CST.ProcCall (wrap (lamb,args))
|
||||
@ -281,7 +281,7 @@ and decompile_eos : eos -> AST.expression -> ((CST.statement List.Ne.t option)*
|
||||
| E_let_in {let_binder;rhs;let_result;inline} ->
|
||||
let%bind lin = decompile_to_data_decl let_binder rhs inline in
|
||||
let%bind (lst, expr) = decompile_eos Expression let_result in
|
||||
let lst = match lst with
|
||||
let lst = match lst with
|
||||
Some lst -> List.Ne.cons (CST.Data lin) lst
|
||||
| None -> (CST.Data lin, [])
|
||||
in
|
||||
@ -310,7 +310,7 @@ and decompile_eos : eos -> AST.expression -> ((CST.statement List.Ne.t option)*
|
||||
)
|
||||
| E_record record ->
|
||||
let record = AST.LMap.to_kv_list record in
|
||||
let aux (AST.Label str, expr) =
|
||||
let aux (AST.Label str, expr) =
|
||||
let field_name = wrap str in
|
||||
let%bind field_expr = decompile_expression expr in
|
||||
let field : CST.field_assignment = {field_name;assignment=rg;field_expr} in
|
||||
@ -350,10 +350,10 @@ and decompile_eos : eos -> AST.expression -> ((CST.statement List.Ne.t option)*
|
||||
| E_update {record={expression_content=E_update _;_} as record;path;update} ->
|
||||
let%bind record = decompile_expression record in
|
||||
let%bind (record,updates) = match record with
|
||||
CST.EUpdate {value;_} -> ok @@ (value.record,value.updates)
|
||||
| _ -> failwith @@ Format.asprintf "Inpossible case %a" AST.PP.expression expr
|
||||
CST.EUpdate {value;_} -> ok @@ (value.record,value.updates)
|
||||
| _ -> failwith @@ Format.asprintf "Inpossible case %a" AST.PP.expression expr
|
||||
in
|
||||
let%bind var,path = match path with
|
||||
let%bind var,path = match path with
|
||||
Access_record var::path -> ok @@ (var,path)
|
||||
| _ -> failwith "Impossible case %a"
|
||||
in
|
||||
@ -369,7 +369,7 @@ and decompile_eos : eos -> AST.expression -> ((CST.statement List.Ne.t option)*
|
||||
let%bind field_expr = decompile_expression update in
|
||||
let (struct_name,field_path) = List.Ne.of_list path in
|
||||
(match field_path with
|
||||
[] ->
|
||||
[] ->
|
||||
(match struct_name with
|
||||
Access_record name ->
|
||||
let record : CST.path = Name record in
|
||||
@ -483,7 +483,7 @@ and decompile_eos : eos -> AST.expression -> ((CST.statement List.Ne.t option)*
|
||||
let var = decompile_variable @@ (fst binder).wrap_content in
|
||||
let bind_to = Option.map (fun (x:AST.expression_variable) -> (rg,decompile_variable x.wrap_content)) @@ snd binder in
|
||||
let%bind expr = decompile_expression collection in
|
||||
let collection = match collection_type with
|
||||
let collection = match collection_type with
|
||||
Map -> CST.Map rg | Set -> Set rg | List -> List rg in
|
||||
let%bind (block,_next) = decompile_to_block body in
|
||||
let block = wrap @@ Option.unopt ~default:(empty_block) block in
|
||||
@ -498,7 +498,7 @@ and decompile_eos : eos -> AST.expression -> ((CST.statement List.Ne.t option)*
|
||||
|
||||
and decompile_if_clause : AST.expression -> (CST.if_clause, _) result = fun e ->
|
||||
let%bind clause = decompile_statements e in
|
||||
match clause with
|
||||
match clause with
|
||||
CST.Instr instr,[] ->
|
||||
ok @@ CST.ClauseInstr instr
|
||||
| _ ->
|
||||
@ -508,12 +508,12 @@ and decompile_if_clause : AST.expression -> (CST.if_clause, _) result = fun e ->
|
||||
and decompile_to_data_decl : (AST.expression_variable * AST.type_expression option) -> AST.expression -> bool -> (CST.data_decl, _) result = fun (name,ty_opt) expr inline ->
|
||||
let name = decompile_variable name.wrap_content in
|
||||
let%bind const_type = bind_map_option (bind_compose (ok <@ prefix_colon) decompile_type_expr) ty_opt in
|
||||
let attributes : CST.attr_decl option = match inline with
|
||||
let attributes : CST.attr_decl option = match inline with
|
||||
true -> Some (wrap @@ ne_inject (NEInjAttr rg) @@ (wrap @@ "inline",[]))
|
||||
| false -> None
|
||||
in
|
||||
let fun_name = name in
|
||||
match expr.expression_content with
|
||||
match expr.expression_content with
|
||||
E_lambda lambda ->
|
||||
let%bind (param,ret_type,return) = decompile_lambda lambda in
|
||||
let fun_decl : CST.fun_decl = {kwd_recursive=None;kwd_function=rg;fun_name;param;ret_type;kwd_is=rg;return;terminator=Some rg;attributes} in
|
||||
@ -531,14 +531,14 @@ and decompile_to_data_decl : (AST.expression_variable * AST.type_expression opti
|
||||
and decompile_to_lhs : AST.expression_variable -> AST.access list -> (CST.lhs, _) result = fun var access ->
|
||||
match List.rev access with
|
||||
[] -> ok @@ (CST.Path (Name (decompile_variable var.wrap_content)) : CST.lhs)
|
||||
| hd :: tl ->
|
||||
match hd with
|
||||
| hd :: tl ->
|
||||
match hd with
|
||||
| AST.Access_map e ->
|
||||
let%bind path = decompile_to_path var @@ List.rev tl in
|
||||
let%bind index = map (wrap <@ brackets) @@ decompile_expression e in
|
||||
let mlu: CST.map_lookup = {path;index} in
|
||||
ok @@ CST.MapPath (wrap @@ mlu)
|
||||
| _ ->
|
||||
| _ ->
|
||||
let%bind path = decompile_to_path var @@ access in
|
||||
ok @@ (CST.Path (path) : CST.lhs)
|
||||
|
||||
@ -570,18 +570,18 @@ and decompile_lambda : AST.lambda -> _ = fun {binder;input_type;output_type;resu
|
||||
let%bind return = decompile_expression result in
|
||||
ok @@ (param,ret_type,return)
|
||||
|
||||
and decompile_matching_expr : type a.(AST.expr ->(a,_) result) -> AST.matching_expr -> ((a CST.case_clause Region.reg, Region.t) Parser_shared.Utils.nsepseq Region.reg,_) result =
|
||||
and decompile_matching_expr : type a.(AST.expr ->(a,_) result) -> AST.matching_expr -> ((a CST.case_clause Region.reg, Region.t) Simple_utils.Utils.nsepseq Region.reg,_) result =
|
||||
fun f m ->
|
||||
let%bind cases = match m with
|
||||
Match_variable (var, _ty_opt, expr) ->
|
||||
let pattern : CST.pattern = PVar (decompile_variable var.wrap_content) in
|
||||
let pattern : CST.pattern = PVar (decompile_variable var.wrap_content) in
|
||||
let%bind rhs = f expr in
|
||||
let case : _ CST.case_clause = {pattern; arrow=rg; rhs}in
|
||||
ok @@ [wrap case]
|
||||
| Match_tuple (lst, _ty_opt, expr) ->
|
||||
let aux (var:AST.expression_variable) = CST.PVar (decompile_variable var.wrap_content) in
|
||||
let%bind tuple = list_to_nsepseq @@ List.map aux lst in
|
||||
let pattern : CST.pattern = PTuple (wrap @@ par @@ tuple) in
|
||||
let pattern : CST.pattern = PTuple (wrap @@ par @@ tuple) in
|
||||
let%bind rhs = f expr in
|
||||
let case : _ CST.case_clause = {pattern; arrow=rg; rhs}in
|
||||
ok @@ [wrap case]
|
||||
@ -604,7 +604,7 @@ fun f m ->
|
||||
let nil_case : _ CST.case_clause = {pattern=PList (PNil rg);arrow=rg; rhs} in
|
||||
ok @@ [wrap cons_case; wrap nil_case]
|
||||
| Match_variant lst ->
|
||||
let aux ((c,(v:AST.expression_variable)),e) =
|
||||
let aux ((c,(v:AST.expression_variable)),e) =
|
||||
let AST.Constructor c = c in
|
||||
let constr = wrap @@ c in
|
||||
let var : CST.pattern = PVar (decompile_variable v.wrap_content) in
|
||||
@ -630,9 +630,9 @@ let decompile_declaration : AST.declaration Location.wrap -> (CST.declaration, _
|
||||
ok @@ CST.TypeDecl (wrap (CST.{kwd_type; name; kwd_is; type_expr; terminator}))
|
||||
| Declaration_constant (var, ty_opt, inline, expr) ->
|
||||
let attributes = match inline with
|
||||
true ->
|
||||
true ->
|
||||
let attr = wrap "inline" in
|
||||
let ne_inj : _ CST.ne_injection =
|
||||
let ne_inj : _ CST.ne_injection =
|
||||
{kind=NEInjAttr rg;enclosing=End rg;ne_elements=(attr, []);terminator=Some rg} in
|
||||
let attr_decl = wrap ne_inj in
|
||||
Some attr_decl
|
||||
|
@ -15,7 +15,7 @@ type abs_error = [
|
||||
| `Concrete_pascaligo_unsupported_string_singleton of Raw.type_expr
|
||||
| `Concrete_pascaligo_unsupported_deep_some_pattern of Raw.pattern
|
||||
| `Concrete_pascaligo_unsupported_deep_list_pattern of Raw.pattern
|
||||
| `Concrete_pascaligo_unsupported_deep_tuple_pattern of (Raw.pattern, Raw.wild) Parser_shared.Utils.nsepseq Raw.par Raw.reg
|
||||
| `Concrete_pascaligo_unsupported_deep_tuple_pattern of (Raw.pattern, Raw.wild) Simple_utils.Utils.nsepseq Raw.par Raw.reg
|
||||
| `Concrete_pascaligo_unknown_built_in of string
|
||||
| `Concrete_pascaligo_michelson_type_wrong of Raw.type_expr * string
|
||||
| `Concrete_pascaligo_michelson_type_wrong_arity of Location.t * string
|
||||
|
@ -6,6 +6,7 @@
|
||||
|
||||
(* Utilities *)
|
||||
|
||||
module Utils = Simple_utils.Utils
|
||||
open Utils
|
||||
|
||||
(* Regions
|
||||
@ -23,6 +24,10 @@ module Region = Simple_utils.Region
|
||||
|
||||
type 'a reg = 'a Region.reg
|
||||
|
||||
(* Lexemes *)
|
||||
|
||||
type lexeme = string
|
||||
|
||||
(* Keywords of OCaml *)
|
||||
|
||||
type keyword = Region.t
|
||||
@ -169,7 +174,7 @@ and type_expr =
|
||||
| TFun of (type_expr * arrow * type_expr) reg
|
||||
| TPar of type_expr par reg
|
||||
| TVar of variable
|
||||
| TString of Lexer.lexeme reg
|
||||
| TString of lexeme reg
|
||||
|
||||
and cartesian = (type_expr, times) nsepseq reg
|
||||
|
||||
@ -192,9 +197,9 @@ and pattern =
|
||||
| PFalse of kwd_false
|
||||
| PTrue of kwd_true
|
||||
| PVar of variable
|
||||
| PInt of (Lexer.lexeme * Z.t) reg
|
||||
| PNat of (Lexer.lexeme * Z.t) reg
|
||||
| PBytes of (Lexer.lexeme * Hex.t) reg
|
||||
| PInt of (lexeme * Z.t) reg
|
||||
| PNat of (lexeme * Z.t) reg
|
||||
| PBytes of (lexeme * Hex.t) reg
|
||||
| PString of string reg
|
||||
| PVerbatim of string reg
|
||||
| PWild of wild
|
||||
|
@ -1 +0,0 @@
|
||||
include CST
|
@ -2,12 +2,11 @@
|
||||
(name cst_cameligo)
|
||||
(public_name ligo.cst.cameligo)
|
||||
(libraries
|
||||
;; Ligo
|
||||
simple-utils
|
||||
tezos-utils
|
||||
parser_shared
|
||||
)
|
||||
;; Third party
|
||||
hex
|
||||
zarith)
|
||||
(modules CST ParserLog)
|
||||
(preprocess
|
||||
(pps ppx_let bisect_ppx --conditional)
|
||||
)
|
||||
(flags (:standard -open Parser_shared -open Simple_utils ))
|
||||
)
|
||||
(pps ppx_let bisect_ppx --conditional)))
|
||||
|
@ -2,13 +2,7 @@
|
||||
(name cst)
|
||||
(public_name ligo.cst)
|
||||
(libraries
|
||||
simple-utils
|
||||
tezos-utils
|
||||
cst_cameligo
|
||||
cst_pascaligo
|
||||
)
|
||||
cst_pascaligo)
|
||||
(preprocess
|
||||
(pps ppx_let bisect_ppx --conditional)
|
||||
)
|
||||
(flags (:standard -open Simple_utils ))
|
||||
)
|
||||
(pps ppx_let bisect_ppx --conditional)))
|
||||
|
@ -6,6 +6,7 @@
|
||||
|
||||
(* Utilities *)
|
||||
|
||||
module Utils = Simple_utils.Utils
|
||||
open Utils
|
||||
|
||||
(* Regions
|
||||
@ -23,6 +24,10 @@ module Region = Simple_utils.Region
|
||||
|
||||
type 'a reg = 'a Region.reg
|
||||
|
||||
(* Lexemes *)
|
||||
|
||||
type lexeme = string
|
||||
|
||||
(* Keywords of LIGO *)
|
||||
|
||||
type keyword = Region.t
|
||||
@ -185,7 +190,7 @@ and type_expr =
|
||||
| TFun of (type_expr * arrow * type_expr) reg
|
||||
| TPar of type_expr par reg
|
||||
| TVar of variable
|
||||
| TString of Lexer.lexeme reg
|
||||
| TString of lexeme reg
|
||||
|
||||
and cartesian = (type_expr, times) nsepseq reg
|
||||
|
||||
@ -227,7 +232,7 @@ and fun_decl = {
|
||||
and block_with = {
|
||||
block : block reg;
|
||||
kwd_with : kwd_with;
|
||||
expr : expr;
|
||||
expr : expr
|
||||
}
|
||||
|
||||
and parameters = (param_decl, semi) nsepseq par reg
|
||||
@ -460,15 +465,15 @@ and expr =
|
||||
| EProj of projection reg
|
||||
| EUpdate of update reg
|
||||
| EMap of map_expr
|
||||
| EVar of Lexer.lexeme reg
|
||||
| EVar of lexeme reg
|
||||
| ECall of fun_call
|
||||
| EBytes of (Lexer.lexeme * Hex.t) reg
|
||||
| EBytes of (lexeme * Hex.t) reg
|
||||
| EUnit of c_Unit
|
||||
| ETuple of tuple_expr
|
||||
| EPar of expr par reg
|
||||
| EFun of fun_expr reg
|
||||
| ECodeInj of code_inj reg
|
||||
| EBlock of block_with reg
|
||||
| EBlock of block_with reg
|
||||
|
||||
and annot_expr = expr * colon * type_expr
|
||||
|
||||
@ -527,14 +532,14 @@ and arith_expr =
|
||||
| Div of slash bin_op reg
|
||||
| Mod of kwd_mod bin_op reg
|
||||
| Neg of minus un_op reg
|
||||
| Int of (Lexer.lexeme * Z.t) reg
|
||||
| Nat of (Lexer.lexeme * Z.t) reg
|
||||
| Mutez of (Lexer.lexeme * Z.t) reg
|
||||
| Int of (lexeme * Z.t) reg
|
||||
| Nat of (lexeme * Z.t) reg
|
||||
| Mutez of (lexeme * Z.t) reg
|
||||
|
||||
and string_expr =
|
||||
Cat of cat bin_op reg
|
||||
| String of Lexer.lexeme reg
|
||||
| Verbatim of Lexer.lexeme reg
|
||||
| String of lexeme reg
|
||||
| Verbatim of lexeme reg
|
||||
|
||||
and list_expr =
|
||||
ECons of cons bin_op reg
|
||||
@ -574,7 +579,7 @@ and field_path_assignment = {
|
||||
|
||||
and selection =
|
||||
FieldName of field_name
|
||||
| Component of (Lexer.lexeme * Z.t) reg
|
||||
| Component of (lexeme * Z.t) reg
|
||||
|
||||
and tuple_expr = (expr, comma) nsepseq par reg
|
||||
|
||||
@ -618,12 +623,12 @@ and ne_injection_kwd =
|
||||
|
||||
and pattern =
|
||||
PConstr of constr_pattern
|
||||
| PVar of Lexer.lexeme reg
|
||||
| PVar of lexeme reg
|
||||
| PWild of wild
|
||||
| PInt of (Lexer.lexeme * Z.t) reg
|
||||
| PNat of (Lexer.lexeme * Z.t) reg
|
||||
| PBytes of (Lexer.lexeme * Hex.t) reg
|
||||
| PString of Lexer.lexeme reg
|
||||
| PInt of (lexeme * Z.t) reg
|
||||
| PNat of (lexeme * Z.t) reg
|
||||
| PBytes of (lexeme * Hex.t) reg
|
||||
| PString of lexeme reg
|
||||
| PList of list_pattern
|
||||
| PTuple of tuple_pattern

@ -64,7 +64,7 @@ let print_sepseq :
    None -> ()
  | Some seq -> print_nsepseq state sep print seq

let print_option : state -> (state -> 'a -> unit ) -> 'a option -> unit =
let print_option : state -> (state -> 'a -> unit) -> 'a option -> unit =
  fun state print -> function
    None -> ()
  | Some opt -> print state opt

@ -141,7 +141,7 @@ and print_const_decl state {value; _} =
       equal; init; terminator; _} = value in
  print_token state kwd_const "const";
  print_var state name;
  print_option state print_colon_type_expr const_type;
  print_option state print_type_annot const_type;
  print_token state equal "=";
  print_expr state init;
  print_terminator state terminator
@ -165,7 +165,7 @@ and print_type_expr state = function
| TVar type_var -> print_var state type_var
| TString str -> print_string state str

and print_colon_type_expr state (colon, type_expr) =
and print_type_annot state (colon, type_expr) =
  print_token state colon ":";
  print_type_expr state type_expr;

@ -223,7 +223,7 @@ and print_fun_decl state {value; _} =
  print_token state kwd_function "function";
  print_var state fun_name;
  print_parameters state param;
  print_option state print_colon_type_expr ret_type;
  print_option state print_type_annot ret_type;
  print_token state kwd_is "is";
  print_expr state return;
  print_terminator state terminator;
@ -233,7 +233,7 @@ and print_fun_expr state {value; _} =
       ret_type; kwd_is; return} : fun_expr = value in
  print_token state kwd_function "function";
  print_parameters state param;
  print_option state print_colon_type_expr ret_type;
  print_option state print_type_annot ret_type;
  print_token state kwd_is "is";
  print_expr state return

@ -255,9 +255,9 @@ and print_block_expr state {value; _} =

and print_parameters state {value; _} =
  let {lpar; inside; rpar} = value in
  print_token state lpar "(";
  print_token state lpar "(";
  print_nsepseq state ";" print_param_decl inside;
  print_token state rpar ")"
  print_token state rpar ")"

and print_param_decl state = function
  ParamConst param_const -> print_param_const state param_const
@ -265,15 +265,15 @@ and print_param_decl state = function

and print_param_const state {value; _} =
  let {kwd_const; var; param_type} = value in
  print_token state kwd_const "const";
  print_var state var;
  print_option state print_colon_type_expr param_type
  print_token state kwd_const "const";
  print_var state var;
  print_option state print_type_annot param_type

and print_param_var state {value; _} =
  let {kwd_var; var; param_type} = value in
  print_token state kwd_var "var";
  print_var state var;
  print_option state print_colon_type_expr param_type
  print_option state print_type_annot param_type

and print_block state block =
  let {enclosing; statements; terminator} = block.value in
@ -300,7 +300,7 @@ and print_var_decl state {value; _} =
       assign; init; terminator} = value in
  print_token state kwd_var "var";
  print_var state name;
  print_option state print_colon_type_expr var_type;
  print_option state print_type_annot var_type;
  print_token state assign ":=";
  print_expr state init;
  print_terminator state terminator
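The hunks above all make the same two moves: the pair printer is renamed from
print_colon_type_expr to print_type_annot, and every optional annotation is
routed through print_option. A minimal, self-contained sketch of how the two
compose (the Buffer-based state and the simplified type_expr are assumptions
of this sketch, not the LIGO sources):

(* Sketch: how print_option composes with a pair printer. *)
type state = Buffer.t
type type_expr = TVar of string                  (* simplified *)

let print_token state _token lexeme = Buffer.add_string state lexeme
let print_type_expr state (TVar v) = Buffer.add_string state v

(* An optional sub-tree is printed by applying a printer under [Some]. *)
let print_option : state -> (state -> 'a -> unit) -> 'a option -> unit =
  fun state print -> function
    None -> ()
  | Some node -> print state node

(* A type annotation is a colon token paired with a type expression. *)
let print_type_annot state (colon, type_expr) =
  print_token state colon ":";
  print_type_expr state type_expr

let () =
  let state = Buffer.create 16 in
  print_option state print_type_annot (Some ((), TVar "nat"));
  print_endline (Buffer.contents state)          (* prints ":nat" *)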
@ -919,35 +919,49 @@ and pp_declaration state = function
and pp_attr_decl state = pp_ne_injection pp_string state

and pp_fun_decl state decl =
  let arity, start =
  let kwd_recursive = if decl.kwd_recursive = None then 0 else 1 in
  let ret_type = if decl.ret_type = None then 0 else 1 in
  let arity = kwd_recursive + ret_type + 3 in
  let index = 0 in
  let index =
    match decl.kwd_recursive with
      None -> 4,0
    | Some _ ->
        let state = state#pad 5 0 in
        let () = pp_node state "recursive"
        in 5,1 in
  let () =
    let state = state#pad arity start in
    pp_ident state decl.fun_name in
  let () =
    let state = state#pad arity (start + 1) in
      None -> index
    | Some _ -> let state = state#pad arity index in
                pp_node state "recursive";
                index + 1 in
  let index =
    let state = state#pad arity index in
    pp_ident state decl.fun_name;
    index + 1 in
  let index =
    let state = state#pad arity index in
    pp_node state "<parameters>";
    pp_parameters state decl.param in
    pp_parameters state decl.param;
    index + 1 in
  let index =
    match decl.ret_type with
      None -> index
    | Some (_, t_expr) ->
        let state = state#pad arity index in
        pp_node state "<return type>";
        pp_type_expr (state#pad 1 0) t_expr;
        index+1 in
  let () =
    let state = state#pad arity (start + 2) in
    pp_node state "<return type>";
    print_option (state#pad 1 0) pp_type_expr @@ Option.map snd decl.ret_type in
  let () =
    let state = state#pad arity (start + 3) in
    let state = state#pad arity index in
    pp_node state "<return>";
    pp_expr (state#pad 1 0) decl.return
  in ()
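The rewritten pp_fun_decl replaces the fixed arity/start bookkeeping with an
arity computed from which optional children are present, and an index threaded
past each child actually printed. A stripped-down sketch of the pattern (the
record fields and the pad function are simplifications standing in for the
real decl and state#pad, not the LIGO sources):

(* Sketch: arity derived from optional children, index threaded through. *)
type decl = {
  kwd_recursive : unit option;    (* Some if the function is recursive *)
  fun_name      : string;
  ret_type      : string option;  (* return type annotation, if any   *)
}

let pad arity index label =       (* stand-in for state#pad + pp_node *)
  Printf.printf "child %d of %d: %s\n" (index + 1) arity label

let pp_fun_decl decl =
  let kwd_recursive = if decl.kwd_recursive = None then 0 else 1 in
  let ret_type      = if decl.ret_type      = None then 0 else 1 in
  let arity = kwd_recursive + ret_type + 1 in  (* one mandatory child here *)
  let index = 0 in
  let index =
    match decl.kwd_recursive with
      None -> index
    | Some _ -> pad arity index "recursive"; index + 1 in
  let index = pad arity index decl.fun_name; index + 1 in
  let index =
    match decl.ret_type with
      None -> index
    | Some t -> pad arity index ("<return type> " ^ t); index + 1 in
  ignore index

let () = pp_fun_decl {kwd_recursive = Some (); fun_name = "f"; ret_type = None}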
and pp_const_decl state decl =
  let arity = 3 in
  pp_ident (state#pad arity 0) decl.name;
  print_option (state#pad arity 1) pp_type_expr @@ Option.map snd decl.const_type;
  pp_expr (state#pad arity 2) decl.init
  let arity = if decl.const_type = None then 2 else 3 in
  let index = 0 in
  let index =
    pp_ident (state#pad arity 0) decl.name; index+1 in
  let index =
    pp_type_annot (state#pad arity index) index decl.const_type in
  let () =
    pp_expr (state#pad arity index) decl.init
  in ()

and pp_type_expr state = function
  TProd cartesian ->
@ -1008,43 +1022,49 @@ and pp_type_tuple state {value; _} =
  in List.iteri (List.length components |> apply) components

and pp_fun_expr state (expr: fun_expr) =
  let () =
    let state = state#pad 3 0 in
  let arity = if expr.ret_type = None then 2 else 3 in
  let index = 0 in
  let index =
    let state = state#pad arity index in
    pp_node state "<parameters>";
    pp_parameters state expr.param in
    pp_parameters state expr.param;
    index + 1 in
  let index =
    match expr.ret_type with
      None -> index
    | Some (_, t_expr) ->
        let state = state#pad arity index in
        pp_node state "<return type>";
        pp_type_expr (state#pad 1 0) t_expr;
        index + 1 in
  let () =
    let state = state#pad 3 1 in
    pp_node state "<return type>";
    print_option (state#pad 1 0) pp_type_expr @@ Option.map snd expr.ret_type in
  let () =
    let state = state#pad 3 2 in
    let state = state#pad arity index in
    pp_node state "<return>";
    pp_expr (state#pad 1 0) expr.return
  in ()

and pp_code_inj state rc =
and pp_code_inj state node =
  let () =
    let state = state#pad 2 0 in
    pp_node state "<language>";
    pp_string (state#pad 1 0) rc.language.value in
    pp_string (state#pad 1 0) node.language.value in
  let () =
    let state = state#pad 2 1 in
    pp_node state "<code>";
    pp_expr (state#pad 1 0) rc.code
    pp_expr (state#pad 1 0) node.code
  in ()

and pp_block_expr state (bw : block_with) =
  let {block;expr;_}:CST.block_with = bw in
and pp_block_expr state node =
  let {block; expr; _} : block_with = node in
  let () =
    let state = state#pad 2 0 in
    pp_node state "<block>";
    pp_statements state block.value.statements
  in
    pp_statements state block.value.statements in
  let () =
    let state = state#pad 2 1 in
    pp_node state "<expr>";
    pp_expr (state#pad 1 0) expr in
  ()
    pp_expr (state#pad 1 0) expr
  in ()

and pp_parameters state {value; _} =
  let params = Utils.nsepseq_to_list value.inside in
@ -1054,13 +1074,15 @@ and pp_parameters state {value; _} =

and pp_param_decl state = function
  ParamConst {value; region} ->
    let arity = if value.param_type = None then 1 else 2 in
    pp_loc_node state "ParamConst" region;
    pp_ident (state#pad 2 0) value.var;
    print_option (state#pad 2 1) pp_type_expr @@ Option.map snd value.param_type
    pp_ident (state#pad arity 0) value.var;
    ignore (pp_type_annot (state#pad arity 1) 1 value.param_type)
| ParamVar {value; region} ->
    let arity = if value.param_type = None then 1 else 2 in
    pp_loc_node state "ParamVar" region;
    pp_ident (state#pad 2 0) value.var;
    print_option (state#pad 2 1) pp_type_expr @@ Option.map snd value.param_type
    ignore (pp_type_annot (state#pad arity 1) 1 value.param_type)

and pp_statements state statements =
  let statements = Utils.nsepseq_to_list statements in
@ -1461,9 +1483,11 @@ and pp_data_decl state = function
  pp_fun_decl state value

and pp_var_decl state decl =
  pp_ident (state#pad 3 0) decl.name;
  print_option (state#pad 3 1) pp_type_expr @@ Option.map snd decl.var_type;
  pp_expr (state#pad 3 2) decl.init
  let arity = if decl.var_type = None then 2 else 3 in
  let index = 0 in
  let index = pp_ident (state#pad arity index) decl.name; index+1 in
  let index = pp_type_annot (state#pad arity index) index decl.var_type
  in pp_expr (state#pad arity index) decl.init

and pp_expr state = function
  ECase {value; region} ->
@ -1663,3 +1687,7 @@ and pp_bin_op node region state op =
  pp_loc_node state node region;
  pp_expr (state#pad 2 0) op.arg1;
  pp_expr (state#pad 2 1) op.arg2

and pp_type_annot state index = function
  None -> index
| Some (_, e) -> pp_type_expr state e; index+1
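The new pp_type_annot helper added above both prints the optional annotation
and returns the index of the next child, so callers can keep threading the
index, or discard it with ignore when the annotation is the last child (as
pp_param_decl does). A small sketch of that contract (the unit state and the
simplified type_expr are assumptions of the sketch):

(* Sketch: an optional-child printer that returns the next child index. *)
type type_expr = TVar of string

let pp_type_expr _state (TVar v) = print_endline v

let pp_type_annot state index = function
  None -> index
| Some (_, e) -> pp_type_expr state e; index + 1

let () =
  let next = pp_type_annot () 1 (Some ((), TVar "int")) in
  Printf.printf "next child index: %d\n" next;
  ignore (pp_type_annot () 1 None)   (* absent: nothing printed, index unused *)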
@ -2,12 +2,11 @@
 (name cst_pascaligo)
 (public_name ligo.cst.pascaligo)
 (libraries
   ;; Ligo
   simple-utils
   tezos-utils
   parser_shared
 )
   ;; Third party
   hex
   zarith)
 (modules CST ParserLog)
 (preprocess
   (pps ppx_let bisect_ppx --conditional)
 )
 (flags (:standard -open Parser_shared -open Simple_utils ))
)
   (pps ppx_let bisect_ppx --conditional)))

@ -1 +0,0 @@
include CST
14  vendors/Preprocessor/dune  vendored
@ -5,11 +5,17 @@
 (public_name Preprocessor)
 (wrapped true)
 (libraries
   getopt
   simple-utils)
 (modules EvalOpt E_Parser E_Lexer E_AST Preproc)
   getopt
   simple-utils)
 (modules
   EvalOpt
   E_Parser
   E_Lexer
   E_AST
   Preproc)
 (preprocess
   (pps bisect_ppx --conditional)))
   (pps bisect_ppx --conditional))
 (flags (:standard -open Simple_utils)))

;; Building the lexers of the preprocessor
7  vendors/ligo-utils/simple-utils/dune  vendored
@ -4,9 +4,6 @@
 (libraries
   yojson
   unix
   str
 )
   str)
 (preprocess
   (pps ppx_let)
 )
)
   (pps ppx_let)))
@ -3,20 +3,16 @@ name : "simple-utils"
version : "dev"
synopsis : "LIGO utilities, to be used by other libraries"
maintainer : "Galfour <contact@ligolang.org>"
authors : "Galfour"
authors : "Galfour, Christian Rinderknecht"
license : "MIT"
homepage: "https://gitlab.com/ligolang/ligo-utils"
bug-reports: "https://gitlab.com/ligolang/ligo-utils/issues"
depends: [
  "dune"
  "base"
  "yojson"
  "ppx_let"
  # from ppx_let:
  "ocaml" {>= "4.04.2" & < "4.08.0"}
  "dune" {build & >= "1.5.1"}
  "ppxlib" {>= "0.5.0"}
]
build: [
  ["dune" "build" "-p" name]
]
homepage : "https://gitlab.com/ligolang/ligo-utils"
bug-reports : "https://gitlab.com/ligolang/ligo-utils/issues"
depends : ["dune"
           "base"
           "yojson"
           "ppx_let"
           # from ppx_let:
           "ocaml" {>= "4.04.2" & < "4.08.0"}
           "dune" {build & >= "1.5.1"}
           "ppxlib" {>= "0.5.0"}]
build : [["dune" "build" "-p" name]]
@ -10,10 +10,14 @@ module Int = X_int
module Tuple = Tuple
module Map = X_map
module Tree = Tree
module Region = Region
module Pos = Pos
module Var = Var
module Ligo_string = X_string
module Display = Display
module Runned_result = Runned_result

(* Originally by Christian Rinderknecht *)

module Pos = Pos
module Region = Region
module Utils = Utils
module FQueue = FQueue