Merge
commit 2b82a74d93
@@ -12,14 +12,14 @@ RUN mkdir /package && mkdir /package/bin && mkdir /package/DEBIAN && mkdir /pack
RUN cp /home/opam/.opam/4.07/bin/ligo /package/bin/ligo
# @TODO: inherit version (and other details) from the ligo opam package definition
# In our case we're using the version field to name our package accordingly,
# however this is most likely not ideal
# Also, the architecture field should not be 'all' but rather specific instead.
RUN echo "Package: ligo\n\
Version: $version\n\
Architecture: all\n\
Maintainer: info@ligolang.org\n\
Depends: libev4, libgmp10, libgmpxx4ldbl, cpp\n\
Depends: libev4, libgmp10, libgmpxx4ldbl\n\
Homepage: http://ligolang.org\n\
Description: LIGO is a statically typed high-level smart-contract language that compiles down to Michelson." >> /package/DEBIAN/control
@@ -29,7 +29,7 @@ RUN opam update
# Install ligo
RUN sh scripts/install_vendors_deps.sh
RUN opam install -y .
RUN opam install -y . || (tail -n +1 ~/.opam/log/* ; false)
# Use the ligo binary as a default command
ENTRYPOINT [ "/home/opam/.opam/4.07/bin/ligo" ]
@@ -466,8 +466,8 @@ let proxy = ((action, store): (parameter, storage)) : return => {
| Some (contract) => contract;
| None => (failwith ("Contract not found.") : contract (parameter));
};
(* Reuse the parameter in the subsequent
transaction or use another one, `mock_param`. *)
/* Reuse the parameter in the subsequent
transaction or use another one, `mock_param`. */
let mock_param : parameter = Increment (5n);
let op : operation = Tezos.transaction (action, 0tez, counter);
([op], store)
@@ -1,6 +1,6 @@
name: "ligo"
opam-version: "2.0"
maintainer: "ligolang@gmail.com"
maintainer: "Galfour <contact@ligolang.org>"
authors: [ "Galfour" ]
homepage: "https://gitlab.com/ligolang/tezos"
bug-reports: "https://gitlab.com/ligolang/tezos/issues"
@@ -23,6 +23,8 @@ depends: [
"getopt"
"terminal_size"
"pprint"
"UnionFind"
"RedBlackTrees"
# work around upstream in-place update
"ocaml-migrate-parsetree" { = "1.4.0" }
]
@@ -1,5 +1,6 @@
#!/bin/sh
set -e
set -x
if test $# -ne 1 || test "x$1" = "-h" -o "x$1" = "x--help"; then
echo "Usage: build_docker_image.sh TAG_NAME"
@@ -1,5 +1,6 @@
#!/bin/sh
set -e
set -x
eval $(opam config env)
dune build -p ligo
@@ -1,4 +1,6 @@
#!/bin/sh
set -e
set -x
dockerfile_name="build"
# Generic dockerfile
@@ -1,4 +1,6 @@
#!/bin/sh
set -e
set -x
dockerfile_name="package"
dockerfile=""
@@ -1,11 +1,15 @@
#!/bin/sh
set -e
set -x
# This script accepts three arguments, os family, os and its version,
# which are subsequently used to fetch the respective docker
# image from the ocaml/infrastructure project.
#
# https://github.com/ocaml/infrastructure/wiki/Containers#selecting-linux-distributions
target_os_family=$1
target_os=$2
target_os_version=$3
target_os_family="$1"
target_os="$2"
target_os_version="$3"
# Variables configured at the CI level
dist="$LIGO_DIST_DIR"
@@ -29,4 +33,4 @@ fi
target_os_specific_dockerfile="./docker/distribution/$target_os_family/$target_os/$dockerfile_name.Dockerfile"
if test -f "$target_os_specific_dockerfile"; then
dockerfile="$target_os_specific_dockerfile"
fi
@@ -22,26 +22,29 @@ echo "Installing dependencies.."
if [ -n "`uname -a | grep -i arch`" ]
then
sudo pacman -Sy --noconfirm \
rakudo \
make \
m4 \
gcc \
patch \
bubblewrap \
rsync \
curl
fi
if [ -n "`uname -a | grep -i ubuntu`" ]
then
sudo apt-get install -y make \
perl6 \
make \
m4 \
gcc \
patch \
bubblewrap \
rsync \
curl
fi
if [ -n "`uname -a | grep -i ubuntu`" ]
then
echo "ubuntu"
@@ -1,11 +1,13 @@
#!/bin/sh
set -e
set -x
. /etc/os-release
if [ $ID = arch ]
then
pacman -Sy
sudo pacman -S --noconfirm \
rakudo \
libevdev \
perl \
pkg-config \
@@ -20,6 +22,7 @@ then
else
apt-get update -qq
apt-get -y -qq install \
perl6 \
libev-dev \
perl \
pkg-config \
@@ -1,5 +1,6 @@
#!/bin/sh
set -e
set -x
# Install local dependencies
opam install -y --deps-only --with-test ./ligo.opam $(find vendors -name \*.opam)
@@ -152,6 +152,18 @@ let compile_file =
let doc = "Subcommand: Compile a contract." in
(Term.ret term , Term.info ~doc cmdname)
let preprocess =
let f source_file syntax display_format = (
toplevel ~display_format @@
let%bind pp =
Compile.Of_source.preprocess source_file (Syntax_name syntax) in
ok @@ Format.asprintf "%s \n" (Buffer.contents pp)
) in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "preprocess" in
let doc = "Subcommand: Preprocess the source file.\nWarning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname)
let print_cst =
let f source_file syntax display_format = (
toplevel ~display_format @@
@@ -470,4 +482,5 @@ let run ?argv () =
print_ast_typed ;
print_mini_c ;
list_declarations ;
preprocess
]
@@ -3,7 +3,7 @@ open Cli_expect
let%expect_test _ =
run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/gitlab_111.religo" ; "main" ] ;
[%expect {|
ligo: : Parse error in file "gitlab_111.religo", line 2, characters 0-3, after "=" and before "let":
ligo: : Parse error in file "gitlab_111.religo", line 2, characters 0-3 at "let", after "=":
This is an incorrect let binding.
-
Examples of correct let bindings:
@@ -23,7 +23,7 @@ let%expect_test _ =
run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/missing_rpar.religo" ; "main" ] ;
[%expect {|
ligo: : Parse error in file "missing_rpar.religo", line 5, characters 0-3, after "m" and before "let":
ligo: : Parse error in file "missing_rpar.religo", line 5, characters 0-3 at "let", after "m":
Missing `)`.
{}
@@ -53,6 +53,10 @@ let%expect_test _ =
measure-contract
Subcommand: Measure a contract's compiled size in bytes.
preprocess
Subcommand: Preprocess the source file. Warning: Intended for
development of LIGO and can break at any time.
print-ast
Subcommand: Print the AST. Warning: Intended for development of
LIGO and can break at any time.
@@ -140,6 +144,10 @@ let%expect_test _ =
measure-contract
Subcommand: Measure a contract's compiled size in bytes.
preprocess
Subcommand: Preprocess the source file. Warning: Intended for
development of LIGO and can break at any time.
print-ast
Subcommand: Print the AST. Warning: Intended for development of
LIGO and can break at any time.
@@ -3,7 +3,7 @@ open Cli_expect
let%expect_test _ =
run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_syntax.ligo" ; "main" ] ;
[%expect {|
ligo: : Parse error in file "error_syntax.ligo", line 1, characters 16-17, after "bar" and before "-":
ligo: : Parse error in file "error_syntax.ligo", line 1, characters 16-17 at "-", after "bar":
15: <syntax error> {}
src/dune
@@ -1,14 +1,13 @@
(dirs (:standard \ toto))
(dirs (:standard))
(library
(name ligo)
(public_name ligo)
(libraries
simple-utils
tezos-utils
tezos-micheline
main
)
Preprocessor
simple-utils
tezos-utils
tezos-micheline
main)
(preprocess
(pps ppx_let bisect_ppx --conditional)
)
)
(pps ppx_let bisect_ppx --conditional)))
@@ -148,18 +148,18 @@ let pretty_print_cameligo source =
~offsets:true
~mode:`Point
~buffer in
Parser.Cameligo.ParserLog.pp_ast state ast;
Parser_cameligo.ParserLog.pp_ast state ast;
ok buffer
let pretty_print_reasonligo source =
let%bind ast = Parser.Reasonligo.parse_file source in
let buffer = Buffer.create 59 in
let state = (* TODO: Should flow from the CLI *)
Parser.Reasonligo.ParserLog.mk_state
Parser_cameligo.ParserLog.mk_state
~offsets:true
~mode:`Point
~buffer in
Parser.Reasonligo.ParserLog.pp_ast state ast;
Parser_cameligo.ParserLog.pp_ast state ast;
ok buffer
let pretty_print syntax source =
@@ -169,3 +169,17 @@ let pretty_print syntax source =
PascaLIGO -> pretty_print_pascaligo source
| CameLIGO -> pretty_print_cameligo source
| ReasonLIGO -> pretty_print_reasonligo source
let preprocess_pascaligo = Parser.Pascaligo.preprocess
let preprocess_cameligo = Parser.Cameligo.preprocess
let preprocess_reasonligo = Parser.Reasonligo.preprocess
let preprocess syntax source =
let%bind v_syntax =
syntax_to_variant syntax (Some source) in
match v_syntax with
PascaLIGO -> preprocess_pascaligo source
| CameLIGO -> preprocess_cameligo source
| ReasonLIGO -> preprocess_reasonligo source
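(* Not part of the diff: a minimal sketch of how the new [preprocess]
   dispatcher above might be consumed, mirroring the pattern used by the
   "preprocess" CLI subcommand earlier in this changeset ([let%bind] over
   Trace, then [Buffer.contents]).  The name [preprocessed_to_string] is
   hypothetical and used only for illustration. *)
let preprocessed_to_string syntax source =
  let%bind buffer = preprocess syntax source in
  ok (Buffer.contents buffer)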
@@ -19,5 +19,8 @@ let compile_contract_input : string -> string -> v_syntax -> Ast_imperative.expr
let%bind (storage,parameter) = bind_map_pair (compile_expression syntax) (storage,parameter) in
ok @@ Ast_imperative.e_pair storage parameter
let pretty_print source_filename syntax =
Helpers.pretty_print syntax source_filename
let preprocess source_filename syntax =
Helpers.preprocess syntax source_filename
@@ -4,26 +4,46 @@ module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_cameligo.ParErr
module SSet = Utils.String.Set
module SSet = Set.Make (String)
(* Mock IOs TODO: Fill them with CLI options *)
module type IO =
sig
val ext : string
val options : EvalOpt.options
end
type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
module PreIO =
module SubIO =
struct
let ext = ".ligo"
let pre_options =
EvalOpt.make ~libs:[]
~verbose:SSet.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
~mono:false
type options = <
libs : string list;
verbose : SSet.t;
offsets : bool;
lang : language;
ext : string; (* ".mligo" *)
mode : [`Byte | `Point];
cmd : EvalOpt.command;
mono : bool
>
let options : options =
object
method libs = []
method verbose = SSet.empty
method offsets = true
method lang = `CameLIGO
method ext = ".mligo"
method mode = `Point
method cmd = EvalOpt.Quiet
method mono = false
end
let make =
EvalOpt.make ~libs:options#libs
~verbose:options#verbose
~offsets:options#offsets
~lang:options#lang
~ext:options#ext
~mode:options#mode
~cmd:options#cmd
~mono:options#mono
end
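(* Illustration only, not part of the diff: [SubIO.options] above is a
   structural object type, so any immediate object providing the same
   methods satisfies it.  As a hypothetical example, a variant that enables
   the "cpp" verbosity flag could be built the same way and passed wherever
   a [SubIO.options] is expected; [_verbose_options] is an invented name. *)
let _verbose_options : SubIO.options =
  object
    method libs    = []
    method verbose = SSet.add "cpp" SSet.empty
    method offsets = true
    method lang    = `CameLIGO
    method ext     = ".mligo"
    method mode    = `Point
    method cmd     = EvalOpt.Quiet
    method mono    = false
  end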
module Parser =
@@ -40,34 +60,33 @@ module ParserLog =
include Parser_cameligo.ParserLog
end
module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
module Unit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)(SubIO)
module Errors =
struct
(* let data =
[("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] *)
let generic message =
let title () = ""
and message () = message.Region.value
in Trace.error ~data:[] title message
end
let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let apply parser =
let local_fail error =
Trace.fail
@@ Errors.generic
@@ Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
@@ Unit.format_error ~offsets:SubIO.options#offsets
SubIO.options#mode error in
match parser () with
Stdlib.Ok semantic_value -> Trace.ok semantic_value
(* Lexing and parsing errors *)
| Stdlib.Error error -> Trace.fail @@ Errors.generic error
(* System errors *)
| exception Sys_error msg ->
Trace.fail @@ Errors.generic (Region.wrap_ghost msg)
(* Scoping errors *)
| exception Scoping.Error (Scoping.Reserved_name name) ->
@@ -110,71 +129,18 @@ let parse (module IO : IO) parser =
Hint: Change the name.\n",
None, invalid))
let parse_file (source: string) =
let module IO =
struct
let ext = PreIO.ext
let options =
PreIO.pre_options ~input:(Some source) ~expr:false
end in
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ IO.ext in
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input in
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ File pp_input) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
(* Parsing a contract in a file *)
let parse_string (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_file source = apply (fun () -> Unit.contract_in_file source)
let parse_expression (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:true
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
(* Parsing a contract in a string *)
let parse_string source = apply (fun () -> Unit.contract_in_string source)
(* Parsing an expression in a string *)
let parse_expression source = apply (fun () -> Unit.expr_in_string source)
(* Preprocessing a contract in a file *)
let preprocess source = apply (fun () -> Unit.preprocess source)
src/passes/1-parser/cameligo.mli (new file)
@@ -0,0 +1,21 @@
(** This file provides an interface to the CameLIGO parser. *)
module AST = Parser_cameligo.AST
(** Open a CameLIGO filename given by string and convert into an
abstract syntax tree. *)
val parse_file : string -> AST.t Trace.result
(** Convert a given string into a CameLIGO abstract syntax tree *)
val parse_string : string -> AST.t Trace.result
(** Parse a given string as a CameLIGO expression and return an
expression AST.
This is intended to be used for interactive interpreters, or other
scenarios where you would want to parse a CameLIGO expression
outside of a contract. *)
val parse_expression : string -> AST.expr Trace.result
(** Preprocess a given CameLIGO file and preprocess it. *)
val preprocess : string -> Buffer.t Trace.result
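(* Usage sketch, not part of the interface: a hypothetical caller going
   through this API.  It assumes the module is reached as [Parser.Cameligo],
   as in the helpers.ml hunk above, and "let x = 1" is only an illustrative
   input string.  The error handling follows the [let open Trace] pattern
   used elsewhere in this changeset. *)
let _check_cameligo () : unit Trace.result =
  let open Trace in
  let%bind _ast = Parser.Cameligo.parse_string "let x = 1" in
  ok ()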
@@ -1,8 +1,5 @@
$HOME/git/OCaml-build/Makefile
$HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.ml
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Lexer.mli
../shared/Lexer.mll
../shared/EvalOpt.ml
@@ -17,7 +14,9 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Utils.ml
../shared/ParserAPI.mli
../shared/ParserAPI.ml
../shared/LexerUnit.mli
../shared/LexerUnit.ml
../shared/ParserUnit.mli
../shared/ParserUnit.ml
Stubs/Simple_utils.ml
$HOME/git/ligo/_build/default/src/passes/1-parser/cameligo/ParErr.ml
@@ -19,6 +19,8 @@ open Utils
denoting the _region_ of the occurrence of the keyword "and".
*)
module Region = Simple_utils.Region
type 'a reg = 'a Region.reg
(* Keywords of OCaml *)
@@ -4,8 +4,7 @@ module Region = Simple_utils.Region
module IO =
struct
let ext = ".mligo"
let options = EvalOpt.read "CameLIGO" ext
let options = EvalOpt.(read ~lang:`CameLIGO ~ext:".mligo")
end
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
@@ -2,4 +2,4 @@ SHELL := dash
BFLAGS := -strict-sequence -w +A-48-4 -g
clean::
> rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml
> \rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml
@@ -3,7 +3,7 @@
[@@@warning "-42"]
open Region
open Simple_utils.Region
open AST
(* END HEADER *)
@@ -2,6 +2,7 @@
[@@@coverage exclude_file]
open AST
module Region = Simple_utils.Region
open! Region
let sprintf = Printf.sprintf
@@ -866,7 +867,7 @@ and pp_let_in state node =
let fields = if lhs_type = None then 3 else 4 in
let fields = if kwd_rec = None then fields else fields+1 in
let fields = if attributes = [] then fields else fields+1 in
let arity =
match kwd_rec with
None -> 0
| Some (_) ->
@@ -1,9 +1,47 @@
(** Driver for the CameLIGO parser *)
(* Driver for the CameLIGO parser *)
module Region = Simple_utils.Region
module SSet = Set.Make (String)
module IO =
struct
let ext = ".mligo"
let options = EvalOpt.read "CameLIGO" ext
let options = EvalOpt.(read ~lang:`CameLIGO ~ext:".mligo")
end
module SubIO =
struct
type options = <
libs : string list;
verbose : SSet.t;
offsets : bool;
lang : EvalOpt.language;
ext : string;
mode : [`Byte | `Point];
cmd : EvalOpt.command;
mono : bool
>
let options : options =
object
method libs = IO.options#libs
method verbose = IO.options#verbose
method offsets = IO.options#offsets
method lang = IO.options#lang
method ext = IO.options#ext
method mode = IO.options#mode
method cmd = IO.options#cmd
method mono = IO.options#mono
end
let make =
EvalOpt.make ~libs:options#libs
~verbose:options#verbose
~offsets:options#offsets
~lang:options#lang
~ext:options#ext
~mode:options#mode
~cmd:options#cmd
~mono:options#mono
end
module Parser =
@@ -23,118 +61,16 @@ module ParserLog =
module Lexer = Lexer.Make (LexToken)
module Unit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)(IO)
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)(SubIO)
(* Main *)
let issue_error error : ('a, string Region.reg) Stdlib.result =
Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error)
let parse parser : ('a, string Region.reg) Stdlib.result =
try parser () with
(* Scoping errors *)
| Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
issue_error
("Reserved name.\nHint: Change the name.\n", None, invalid))
| Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region in
let point = "Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n",
None, token
in issue_error point
| Scoping.Error (Scoping.Non_linear_pattern var) ->
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
(* Cannot fail because [var] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point = "Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid
in issue_error point)
| Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point = "Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid
in issue_error point)
(* Preprocessing the input source with CPP *)
module SSet = Utils.String.Set
let sprintf = Printf.sprintf
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let wrap = function
Stdlib.Ok _ -> flush_all ()
| Error msg ->
(flush_all (); Printf.eprintf "\027[31m%s\027[0m%!" msg.Region.value)
let () =
if Sys.command cpp_cmd <> 0 then
Printf.eprintf "External error: \"%s\" failed." cpp_cmd
(* Instantiating the lexer and calling the parser *)
let lexer_inst =
match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
if IO.options#expr
then
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value
else
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value)
| Stdlib.Error (Lexer.File_opening msg) ->
Printf.eprintf "\027[31m%s\027[0m%!" msg
match IO.options#input with
None -> Unit.contract_in_stdin () |> wrap
| Some file_path -> Unit.contract_in_file file_path |> wrap
@@ -1,5 +1,6 @@
[@@@warning "-42"]
module Region = Simple_utils.Region
type t =
Reserved_name of AST.variable
@@ -1,5 +1,7 @@
(* This module exports checks on scoping, called from the parser. *)
module Region = Simple_utils.Region
type t =
Reserved_name of AST.variable
| Duplicate_variant of AST.variable
@@ -1,2 +0,0 @@
module Region = Region
module Pos = Pos
@@ -77,8 +77,8 @@
; (targets error.messages)
; (deps Parser.mly ParToken.mly error.messages.checked-in)
; (action
; (with-stdout-to %{targets}
; (bash
; "menhir \
; --unused-tokens \
; --list-errors \
@@ -97,11 +97,11 @@
(targets error.messages)
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action
(with-stdout-to %{targets}
(run
menhir
--unused-tokens
--update-errors error.messages.checked-in
--table
--strict
--external-tokens LexToken.mli
@@ -115,8 +115,8 @@
(rule
(target error.messages.new)
(action
(with-stdout-to %{target}
(run
menhir
--unused-tokens
--list-errors
@@ -135,7 +135,7 @@
(name runtest)
(deps error.messages error.messages.new)
(action
(run
menhir
--unused-tokens
--table
@@ -156,8 +156,8 @@
(targets ParErr.ml)
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action
(with-stdout-to %{targets}
(run
menhir
--unused-tokens
--table
@@ -4,26 +4,46 @@ module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_pascaligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_pascaligo.ParErr
module SSet = Utils.String.Set
module SSet = Set.Make (String)
(* Mock IOs TODO: Fill them with CLI options *)
module type IO =
sig
val ext : string
val options : EvalOpt.options
end
type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
module PreIO =
module SubIO =
struct
let ext = ".ligo"
let pre_options =
EvalOpt.make ~libs:[]
~verbose:SSet.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
~mono:false
type options = <
libs : string list;
verbose : SSet.t;
offsets : bool;
lang : language;
ext : string; (* ".ligo" *)
mode : [`Byte | `Point];
cmd : EvalOpt.command;
mono : bool
>
let options : options =
object
method libs = []
method verbose = SSet.empty
method offsets = true
method lang = `PascaLIGO
method ext = ".ligo"
method mode = `Point
method cmd = EvalOpt.Quiet
method mono = false
end
let make =
EvalOpt.make ~libs:options#libs
~verbose:options#verbose
~offsets:options#offsets
~lang:options#lang
~ext:options#ext
~mode:options#mode
~cmd:options#cmd
~mono:options#mono
end
module Parser =
@@ -40,34 +60,34 @@ module ParserLog =
include Parser_pascaligo.ParserLog
end
module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
module Unit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)(SubIO)
module Errors =
struct
(* let data =
[("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] *)
let generic message =
let title () = ""
and message () = message.Region.value
in Trace.error ~data:[] title message
end
let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let apply parser =
let local_fail error =
Trace.fail
@@ Errors.generic
@@ Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
@@ Unit.format_error ~offsets:SubIO.options#offsets
SubIO.options#mode error in
match parser () with
Stdlib.Ok semantic_value -> Trace.ok semantic_value
(* Lexing and parsing errors *)
| Stdlib.Error error -> Trace.fail @@ Errors.generic error
(* System errors *)
| exception Sys_error msg ->
Trace.fail @@ Errors.generic (Region.wrap_ghost msg)
(* Scoping errors *)
| exception Scoping.Error (Scoping.Reserved_name name) ->
@@ -121,71 +141,18 @@ let parse (module IO : IO) parser =
Hint: Change the name.\n",
None, invalid))
let parse_file source =
let module IO =
struct
let ext = PreIO.ext
let options =
PreIO.pre_options ~input:(Some source) ~expr:false
end in
let module Unit = PreUnit (IO) in
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ IO.ext in
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input in
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in
match Lexer.(open_token_stream @@ File pp_input) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
(* Parsing a contract in a file *)
let parse_string (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_file source = apply (fun () -> Unit.contract_in_file source)
let parse_expression (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:true
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
(* Parsing a contract in a string *)
let parse_string source = apply (fun () -> Unit.contract_in_string source)
(* Parsing an expression in a string *)
let parse_expression source = apply (fun () -> Unit.expr_in_string source)
(* Preprocessing a contract in a file *)
let preprocess source = apply (fun () -> Unit.preprocess source)
@@ -16,3 +16,6 @@ val parse_string : string -> AST.t Trace.result
scenarios where you would want to parse a PascaLIGO expression
outside of a contract. *)
val parse_expression : string -> AST.expr Trace.result
(** Preprocess a given PascaLIGO file and preprocess it. *)
val preprocess : string -> Buffer.t Trace.result
@@ -1,8 +1,5 @@
$HOME/git/OCaml-build/Makefile
$HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.ml
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Lexer.mli
../shared/Lexer.mll
../shared/EvalOpt.ml
@@ -21,7 +18,5 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/LexerUnit.ml
../shared/ParserUnit.mli
../shared/ParserUnit.ml
../shared/Memo.mli
../shared/Memo.ml
Stubs/Simple_utils.ml
$HOME/git/ligo/_build/default/src/passes/1-parser/pascaligo/ParErr.ml
@@ -19,6 +19,8 @@ open Utils
denoting the _region_ of the occurrence of the keyword "and".
*)
module Region = Simple_utils.Region
type 'a reg = 'a Region.reg
(* Keywords of LIGO *)
@@ -11,8 +11,8 @@ let sprintf = Printf.sprintf
module Region = Simple_utils.Region
module Pos = Simple_utils.Pos
module SMap = Utils.String.Map
module SSet = Utils.String.Set
module SMap = Map.Make (String)
module SSet = Set.Make (String)
(* Hack to roll back one lexeme in the current semantic action *)
(*
@@ -4,8 +4,7 @@ module Region = Simple_utils.Region
module IO =
struct
let ext = ".ligo"
let options = EvalOpt.read "PascaLIGO" ext
let options = EvalOpt.(read ~lang:`PascaLIGO ~ext:".ligo")
end
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
@@ -13,4 +12,4 @@ module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
let () =
match M.trace () with
Stdlib.Ok () -> ()
| Error Region.{value; _} -> Utils.highlight value
| Error Region.{value; _} -> Printf.eprintf "\027[31m%s\027[0m%!" value
@@ -2,4 +2,4 @@ SHELL := dash
BFLAGS := -strict-sequence -w +A-48-4 -g
clean::
> rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml
> \rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml
@@ -1,39 +0,0 @@
module ParserLog = Parser_pascaligo.ParserLog
module ParErr = Parser_pascaligo.ParErr
module SSet = Utils.String.Set
(* Mock options. TODO: Plug in cmdliner. *)
let pre_options =
EvalOpt.make
~libs:[]
~verbose:SSet.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
~mono:true (* Monolithic API of Menhir for now *)
(* ~input:None *)
(* ~expr:true *)
module Parser =
struct
type ast = AST.t
type expr = AST.expr
include Parser_pascaligo.Parser
end
module ParserLog =
struct
type ast = AST.t
type expr = AST.expr
include Parser_pascaligo.ParserLog
end
module PreUnit = ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
module Front = ParserAPI.Make (Lexer)(Parser)(ParErr)
let issue_error point =
let error = Front.format_error ~offsets:true (* TODO: CLI *)
`Point (* TODO: CLI *) point
in Stdlib.Error error
@@ -3,7 +3,7 @@
[@@@warning "-42"]
open Region
open Simple_utils.Region
open AST
(* END HEADER *)
@@ -2,6 +2,8 @@
[@@@coverage exclude_file]
open AST
module Region = Simple_utils.Region
open! Region
let sprintf = Printf.sprintf
@@ -1,9 +1,47 @@
(* Driver for the PascaLIGO parser *)
module Region = Simple_utils.Region
module SSet = Set.Make (String)
module IO =
struct
let ext = ".ligo"
let options = EvalOpt.read "PascaLIGO" ext
let options = EvalOpt.(read ~lang:`PascaLIGO ~ext:".ligo")
end
module SubIO =
struct
type options = <
libs : string list;
verbose : SSet.t;
offsets : bool;
lang : EvalOpt.language;
ext : string;
mode : [`Byte | `Point];
cmd : EvalOpt.command;
mono : bool
>
let options : options =
object
method libs = IO.options#libs
method verbose = IO.options#verbose
method offsets = IO.options#offsets
method lang = IO.options#lang
method ext = IO.options#ext
method mode = IO.options#mode
method cmd = IO.options#cmd
method mono = IO.options#mono
end
let make =
EvalOpt.make ~libs:options#libs
~verbose:options#verbose
~offsets:options#offsets
~lang:options#lang
~ext:options#ext
~mode:options#mode
~cmd:options#cmd
~mono:options#mono
end
module Parser =
@@ -23,130 +61,16 @@ module ParserLog =
module Lexer = Lexer.Make (LexToken)
module Unit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)(IO)
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)(SubIO)
(* Main *)
let issue_error error : ('a, string Region.reg) Stdlib.result =
Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error)
let parse parser : ('a, string Region.reg) Stdlib.result =
try parser () with
(* Scoping errors *)
| Scoping.Error (Scoping.Duplicate_parameter name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
issue_error ("Duplicate parameter.\nHint: Change the name.\n",
None, invalid))
| Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
issue_error
("Reserved name.\nHint: Change the name.\n", None, invalid))
| Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region in
let point = "Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n",
None, token
in issue_error point
| Scoping.Error (Scoping.Non_linear_pattern var) ->
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
(* Cannot fail because [var] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point = "Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid
in issue_error point)
| Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point =
"Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid
in issue_error point)
(* Preprocessing the input source with CPP *)
module SSet = Utils.String.Set
let sprintf = Printf.sprintf
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let wrap = function
Stdlib.Ok _ -> flush_all ()
| Error msg ->
(flush_all (); Printf.eprintf "\027[31m%s\027[0m%!" msg.Region.value)
let () =
if Sys.command cpp_cmd <> 0 then
Printf.eprintf "External error: \"%s\" failed." cpp_cmd
(* Instantiating the lexer and calling the parser *)
let lexer_inst =
match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
if IO.options#expr
then
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value
else
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value)
| Stdlib.Error (Lexer.File_opening msg) ->
Printf.eprintf "\027[31m%s\027[0m%!" msg
match IO.options#input with
None -> Unit.contract_in_stdin () |> wrap
| Some file_path -> Unit.contract_in_file file_path |> wrap
@@ -1,5 +1,6 @@
[@@@warning "-42"]
module Region = Simple_utils.Region
type t =
Reserved_name of AST.variable
@@ -1,5 +1,7 @@
(* This module exports checks on scoping, called from the parser. *)
module Region = Simple_utils.Region
type t =
Reserved_name of AST.variable
| Duplicate_parameter of AST.variable
@@ -1,2 +0,0 @@
module Region = Region
module Pos = Pos
@@ -20,6 +20,7 @@
menhirLib
parser_shared
hex
Preprocessor
simple-utils)
(preprocess
(pps bisect_ppx --conditional))
@@ -77,8 +78,8 @@
; (targets error.messages)
; (deps Parser.mly ParToken.mly error.messages.checked-in)
; (action
; (with-stdout-to %{targets}
; (bash
; "menhir \
; --unused-tokens \
; --list-errors \
@@ -97,11 +98,11 @@
(targets error.messages)
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action
(with-stdout-to %{targets}
(run
menhir
--unused-tokens
--update-errors error.messages.checked-in
--table
--strict
--external-tokens LexToken.mli
@@ -115,8 +116,8 @@
(rule
(target error.messages.new)
(action
(with-stdout-to %{target}
(run
menhir
--unused-tokens
--list-errors
@@ -135,7 +136,7 @@
(name runtest)
(deps error.messages error.messages.new)
(action
(run
menhir
--unused-tokens
--table
@@ -156,8 +157,8 @@
(targets ParErr.ml)
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
(action
(with-stdout-to %{targets}
(run
menhir
--unused-tokens
--table
@@ -170,4 +171,3 @@
)
))
)
@@ -2,31 +2,51 @@ open Trace
module AST = Parser_cameligo.AST
module LexToken = Parser_reasonligo.LexToken
module Lexer = Lexer.Make(LexToken)
module Lexer = Lexer.Make (LexToken)
module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_reasonligo.ParErr
module SyntaxError = Parser_reasonligo.SyntaxError
module SSet = Utils.String.Set
module SSet = Set.Make (String)
(* Mock IOs TODO: Fill them with CLI options *)
module type IO =
sig
val ext : string
val options : EvalOpt.options
end
type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
module PreIO =
module SubIO =
struct
let ext = ".ligo"
let pre_options =
EvalOpt.make ~libs:[]
~verbose:SSet.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
~mono:false
type options = <
libs : string list;
verbose : SSet.t;
offsets : bool;
lang : language;
ext : string; (* ".religo" *)
mode : [`Byte | `Point];
cmd : EvalOpt.command;
mono : bool
>
let options : options =
object
method libs = []
method verbose = SSet.empty
method offsets = true
method lang = `ReasonLIGO
method ext = ".religo"
method mode = `Point
method cmd = EvalOpt.Quiet
method mono = false
end
let make =
EvalOpt.make ~libs:options#libs
~verbose:options#verbose
~offsets:options#offsets
~lang:options#lang
~ext:options#ext
~mode:options#mode
~cmd:options#cmd
~mono:options#mono
end
module Parser =
@@ -43,8 +63,8 @@ module ParserLog =
include Parser_cameligo.ParserLog
end
module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
module Unit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)(SubIO)
module Errors =
struct
@@ -55,23 +75,23 @@ module Errors =
let wrong_function_arguments (expr: AST.expr) =
let title () = "" in
let message () = "It looks like you are defining a function, \
however we do not\n\
understand the parameters declaration.\n\
Examples of valid functions:\n\
let x = (a: string, b: int) : int => 3;\n\
let tuple = ((a, b): (int, int)) => a + b; \n\
let x = (a: string) : string => \"Hello, \" ++ a;\n"
in
let message () =
"It looks like you are defining a function, \
however we do not\n\
understand the parameters declaration.\n\
Examples of valid functions:\n\
let x = (a: string, b: int) : int => 3;\n\
let tuple = ((a, b): (int, int)) => a + b; \n\
let x = (a: string) : string => \"Hello, \" ++ a;\n" in
let expression_loc = AST.expr_to_region expr in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ expression_loc)]
in error ~data title message
let invalid_wild (expr: AST.expr) =
let title () = "" in
let message () =
"It looks like you are using a wild pattern where it cannot be used."
in
let expression_loc = AST.expr_to_region expr in
@@ -82,13 +102,12 @@ module Errors =
end
let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let apply parser =
let local_fail error =
Trace.fail
@@ Errors.generic
@@ Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
@@ Unit.format_error ~offsets:SubIO.options#offsets
SubIO.options#mode error in
match parser () with
Stdlib.Ok semantic_value -> Trace.ok semantic_value
@@ -142,71 +161,18 @@ let parse (module IO : IO) parser =
| exception SyntaxError.Error (SyntaxError.InvalidWild expr) ->
Trace.fail @@ Errors.invalid_wild expr
let parse_file (source: string) =
let module IO =
struct
let ext = PreIO.ext
let options =
PreIO.pre_options ~input:(Some source) ~expr:false
end in
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ IO.ext in
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input in
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ File pp_input) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
(* Parsing a contract in a file *)
let parse_string (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_file source = apply (fun () -> Unit.contract_in_file source)
let parse_expression (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:true
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
(* Parsing a contract in a string *)
let parse_string source = apply (fun () -> Unit.contract_in_string source)
(* Parsing an expression in a string *)
let parse_expression source = apply (fun () -> Unit.expr_in_string source)
(* Preprocessing a contract in a file *)
let preprocess source = apply (fun () -> Unit.preprocess source)
src/passes/1-parser/reasonligo.mli (new file)
@@ -0,0 +1,21 @@
(** This file provides an interface to the ReasonLIGO parser. *)
module AST = Parser_cameligo.AST
(** Open a ReasonLIGO filename given by string and convert into an
abstract syntax tree. *)
val parse_file : string -> AST.t Trace.result
(** Convert a given string into a ReasonLIGO abstract syntax tree *)
val parse_string : string -> AST.t Trace.result
(** Parse a given string as a ReasonLIGO expression and return an
expression AST.
This is intended to be used for interactive interpreters, or other
scenarios where you would want to parse a ReasonLIGO expression
outside of a contract. *)
val parse_expression : string -> AST.expr Trace.result
(** Preprocess a given ReasonLIGO file and preprocess it. *)
val preprocess : string -> Buffer.t Trace.result
@@ -1,8 +1,5 @@
$HOME/git/OCaml-build/Makefile
$HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.ml
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Lexer.mli
../shared/Lexer.mll
../shared/EvalOpt.ml
@@ -17,13 +14,17 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Utils.ml
../shared/ParserAPI.mli
../shared/ParserAPI.ml
../shared/LexerUnit.mli
../shared/LexerUnit.ml
../shared/ParserUnit.mli
../shared/ParserUnit.ml
Stubs/Simple_utils.ml
Stubs/Parser_cameligo.ml
../cameligo/AST.ml
../cameligo/ParserLog.mli
../cameligo/ParserLog.ml
../cameligo/Scoping.mli
../cameligo/Scoping.ml
$HOME/git/ligo/_build/default/src/passes/1-parser/reasonligo/ParErr.ml
@@ -4,8 +4,7 @@ module Region = Simple_utils.Region
module IO =
struct
let ext = ".religo"
let options = EvalOpt.read "ReasonLIGO" ext
let options = EvalOpt.(read ~lang:`ReasonLIGO ~ext:".religo")
end
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
@@ -2,4 +2,4 @@ SHELL := dash
BFLAGS := -strict-sequence -w +A-48-4 -g
clean::
> rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml
> \rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml
@@ -3,6 +3,7 @@
[@@@warning "-42"]
module Region = Simple_utils.Region
open Region
module AST = Parser_cameligo.AST
open! AST
@@ -560,7 +561,7 @@ fun_expr:
in raise (Error (WrongFunctionArguments e))
in
let binders = fun_args_to_pattern $1 in
let lhs_type = match $1 with
EAnnot {value = {inside = _ , _, t; _}; region = r} -> Some (r,t)
| _ -> None
in
@ -1,9 +1,47 @@
(** Driver for the ReasonLIGO parser *)
(* Driver for the ReasonLIGO parser *)

module Region = Simple_utils.Region
module SSet = Set.Make (String)

module IO =
struct
let ext = ".religo"
let options = EvalOpt.read "ReasonLIGO" ext
let options = EvalOpt.(read ~lang:`ReasonLIGO ~ext:".religo")
end

module SubIO =
struct
type options = <
libs : string list;
verbose : SSet.t;
offsets : bool;
lang : EvalOpt.language;
ext : string;
mode : [`Byte | `Point];
cmd : EvalOpt.command;
mono : bool
>

let options : options =
object
method libs = IO.options#libs
method verbose = IO.options#verbose
method offsets = IO.options#offsets
method lang = IO.options#lang
method ext = IO.options#ext
method mode = IO.options#mode
method cmd = IO.options#cmd
method mono = IO.options#mono
end

let make =
EvalOpt.make ~libs:options#libs
~verbose:options#verbose
~offsets:options#offsets
~lang:options#lang
~ext:options#ext
~mode:options#mode
~cmd:options#cmd
~mono:options#mono
end
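(* Illustrative note, not part of the diff: [SubIO.options] above is a
   plain OCaml object used as an immutable record of CLI settings, so
   ParserUnit can accept any object exposing at least these methods.
   A minimal standalone instance of the same pattern, with made-up names: *)
type demo_options = < offsets : bool; mode : [`Byte | `Point] >

let _demo : demo_options =
  object method offsets = true method mode = `Point end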
|
||||
module Parser =
|
||||
@ -23,138 +61,16 @@ module ParserLog =
|
||||
module Lexer = Lexer.Make (LexToken)
|
||||
|
||||
module Unit =
|
||||
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)(IO)
|
||||
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)(SubIO)
|
||||
|
||||
(* Main *)
|
||||
|
||||
let issue_error error : ('a, string Region.reg) Stdlib.result =
|
||||
Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
|
||||
IO.options#mode error)
|
||||
|
||||
let parse parser : ('a, string Region.reg) Stdlib.result =
|
||||
try parser () with
|
||||
(* Ad hoc errors from the parser *)
|
||||
|
||||
SyntaxError.Error (SyntaxError.WrongFunctionArguments expr) ->
|
||||
let msg = "It looks like you are defining a function, \
|
||||
however we do not\n\
|
||||
understand the parameters declaration.\n\
|
||||
Examples of valid functions:\n\
|
||||
let x = (a: string, b: int) : int => 3;\n\
|
||||
let x = (a: string) : string => \"Hello, \" ++ a;\n"
|
||||
and region = AST.expr_to_region expr in
|
||||
let error = Unit.short_error ~offsets:IO.options#offsets
|
||||
IO.options#mode msg region
|
||||
in Stdlib.Error Region.{value=error; region}
|
||||
|
||||
(* Scoping errors *)
|
||||
| SyntaxError.Error (SyntaxError.InvalidWild expr) ->
|
||||
let msg = "It looks like you are using a wild pattern where it cannot be used.\n"
|
||||
and region = AST.expr_to_region expr in
|
||||
let error = Unit.short_error ~offsets:IO.options#offsets
|
||||
IO.options#mode msg region
|
||||
in Stdlib.Error Region.{value=error; region}
|
||||
| Scoping.Error (Scoping.Reserved_name name) ->
|
||||
let token =
|
||||
Lexer.Token.mk_ident name.Region.value name.Region.region in
|
||||
(match token with
|
||||
(* Cannot fail because [name] is a not a
|
||||
reserved name for the lexer. *)
|
||||
Stdlib.Error _ -> assert false
|
||||
| Ok invalid ->
|
||||
issue_error
|
||||
("Reserved name.\nHint: Change the name.\n", None, invalid))
|
||||
|
||||
| Scoping.Error (Scoping.Duplicate_variant name) ->
|
||||
let token =
|
||||
Lexer.Token.mk_constr name.Region.value name.Region.region in
|
||||
let point = "Duplicate constructor in this sum type declaration.\n\
|
||||
Hint: Change the constructor.\n",
|
||||
None, token
|
||||
in issue_error point
|
||||
|
||||
| Scoping.Error (Scoping.Non_linear_pattern var) ->
|
||||
let token =
|
||||
Lexer.Token.mk_ident var.Region.value var.Region.region in
|
||||
(match token with
|
||||
(* Cannot fail because [var] is a not a
|
||||
reserved name for the lexer. *)
|
||||
Stdlib.Error _ -> assert false
|
||||
| Ok invalid ->
|
||||
let point = "Repeated variable in this pattern.\n\
|
||||
Hint: Change the name.\n",
|
||||
None, invalid
|
||||
in issue_error point)
|
||||
|
||||
| Scoping.Error (Scoping.Duplicate_field name) ->
|
||||
let token =
|
||||
Lexer.Token.mk_ident name.Region.value name.Region.region in
|
||||
(match token with
|
||||
(* Cannot fail because [name] is a not a
|
||||
reserved name for the lexer. *)
|
||||
Stdlib.Error _ -> assert false
|
||||
| Ok invalid ->
|
||||
let point =
|
||||
"Duplicate field name in this record declaration.\n\
|
||||
Hint: Change the name.\n",
|
||||
None, invalid
|
||||
in issue_error point)
|
||||
|
||||
(* Preprocessing the input source with CPP *)
|
||||
|
||||
module SSet = Utils.String.Set
|
||||
let sprintf = Printf.sprintf
|
||||
|
||||
(* Path for CPP inclusions (#include) *)
|
||||
|
||||
let lib_path =
|
||||
match IO.options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
let prefix =
|
||||
match IO.options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp" ^ IO.ext
|
||||
|
||||
let pp_input =
|
||||
if SSet.mem "cpp" IO.options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out =
|
||||
Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match IO.options#input with
|
||||
None | Some "-" ->
|
||||
sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
| Some file ->
|
||||
sprintf "cpp -traditional-cpp%s %s > %s"
|
||||
lib_path file pp_input
|
||||
let wrap = function
|
||||
Stdlib.Ok _ -> flush_all ()
|
||||
| Error msg ->
|
||||
(flush_all (); Printf.eprintf "\027[31m%s\027[0m%!" msg.Region.value)
|
||||
|
||||
let () =
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
Printf.eprintf "External error: \"%s\" failed." cpp_cmd
|
||||
|
||||
(* Instantiating the lexer and calling the parser *)
|
||||
|
||||
let lexer_inst =
|
||||
match Lexer.open_token_stream (Lexer.File pp_input) with
|
||||
Ok instance ->
|
||||
if IO.options#expr
|
||||
then
|
||||
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
|
||||
Stdlib.Ok _ -> ()
|
||||
| Error Region.{value; _} ->
|
||||
Printf.eprintf "\027[31m%s\027[0m%!" value
|
||||
else
|
||||
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
|
||||
Stdlib.Ok _ -> ()
|
||||
| Error Region.{value; _} ->
|
||||
Printf.eprintf "\027[31m%s\027[0m%!" value)
|
||||
| Stdlib.Error (Lexer.File_opening msg) ->
|
||||
Printf.eprintf "\027[31m%s\027[0m%!" msg
|
||||
match IO.options#input with
|
||||
None -> Unit.contract_in_stdin () |> wrap
|
||||
| Some file_path -> Unit.contract_in_file file_path |> wrap
|
||||
|
@ -1,2 +0,0 @@
|
||||
module Region = Region
|
||||
module Pos = Pos
|
@ -73,14 +73,13 @@
|
||||
(action (run %{script_cover} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly --ext=religo --unlexer=./Unlexer.exe --messages=Parser.msg --dir=. --concatenate Parser.mly )))
|
||||
|
||||
;; Error messages
|
||||
|
||||
;; Generate error messages from scratch
|
||||
; (rule
|
||||
; (targets error.messages)
|
||||
; (deps Parser.mly ParToken.mly error.messages.checked-in)
|
||||
; (action
|
||||
; (with-stdout-to %{targets}
|
||||
; (bash
|
||||
; (with-stdout-to %{targets}
|
||||
; (bash
|
||||
; "menhir \
|
||||
; --unused-tokens \
|
||||
; --list-errors \
|
||||
@ -99,11 +98,11 @@
|
||||
(targets error.messages)
|
||||
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
|
||||
(action
|
||||
(with-stdout-to %{targets}
|
||||
(run
|
||||
menhir
|
||||
(with-stdout-to %{targets}
|
||||
(run
|
||||
menhir
|
||||
--unused-tokens
|
||||
--update-errors error.messages.checked-in
|
||||
--update-errors error.messages.checked-in
|
||||
--table
|
||||
--strict
|
||||
--external-tokens LexToken.mli
|
||||
@ -117,8 +116,8 @@
|
||||
(rule
|
||||
(target error.messages.new)
|
||||
(action
|
||||
(with-stdout-to %{target}
|
||||
(run
|
||||
(with-stdout-to %{target}
|
||||
(run
|
||||
menhir
|
||||
--unused-tokens
|
||||
--list-errors
|
||||
@ -137,7 +136,7 @@
|
||||
(name runtest)
|
||||
(deps error.messages error.messages.new)
|
||||
(action
|
||||
(run
|
||||
(run
|
||||
menhir
|
||||
--unused-tokens
|
||||
--table
|
||||
@ -158,8 +157,8 @@
|
||||
(targets ParErr.ml)
|
||||
(deps Parser.mly ParToken.mly error.messages.checked-in LexToken.mli)
|
||||
(action
|
||||
(with-stdout-to %{targets}
|
||||
(run
|
||||
(with-stdout-to %{targets}
|
||||
(run
|
||||
menhir
|
||||
--unused-tokens
|
||||
--table
|
||||
|
@ -1,7 +1,7 @@
|
||||
$HOME/git/OCaml-build/Makefile
|
||||
$HOME/git/OCaml-build/Makefile.cfg
|
||||
|
||||
$HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.mli
|
||||
$HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.ml
|
||||
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
|
||||
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
|
||||
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
|
||||
|
@ -1,45 +1,62 @@
|
||||
(** Parsing command-line options *)
|
||||
(* Parsing command-line options *)
|
||||
|
||||
(* The type [command] denotes some possible behaviours of the
|
||||
compiler. *)
|
||||
|
||||
(** The type [command] denotes some possible behaviours of the
|
||||
compiler.
|
||||
*)
|
||||
type command = Quiet | Copy | Units | Tokens
|
||||
|
||||
(** The type [options] gathers the command-line options.
|
||||
*)
|
||||
type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
|
||||
|
||||
let lang_to_string = function
|
||||
`PascaLIGO -> "PascaLIGO"
|
||||
| `CameLIGO -> "CameLIGO"
|
||||
| `ReasonLIGO -> "ReasonLIGO"
|
||||
|
||||
(* The type [options] gathers the command-line options. *)
|
||||
|
||||
module SSet = Set.Make (String)
|
||||
|
||||
type options = <
|
||||
input : string option;
|
||||
libs : string list;
|
||||
verbose : Utils.String.Set.t;
|
||||
verbose : SSet.t;
|
||||
offsets : bool;
|
||||
lang : language;
|
||||
ext : string; (* ".ligo", ".mligo", ".religo" *)
|
||||
mode : [`Byte | `Point];
|
||||
cmd : command;
|
||||
mono : bool;
|
||||
expr : bool
|
||||
>
|
||||
|
||||
let make ~input ~libs ~verbose ~offsets ~mode ~cmd ~mono ~expr =
|
||||
let make ~input ~libs ~verbose ~offsets ~lang ~ext ~mode ~cmd ~mono ~expr : options =
|
||||
object
|
||||
method input = input
|
||||
method libs = libs
|
||||
method verbose = verbose
|
||||
method offsets = offsets
|
||||
method lang = lang
|
||||
method ext = ext
|
||||
method mode = mode
|
||||
method cmd = cmd
|
||||
method mono = mono
|
||||
method expr = expr
|
||||
end
|
||||
|
||||
(** {1 Auxiliary functions} *)
|
||||
(* Auxiliary functions *)
|
||||
|
||||
let printf = Printf.printf
|
||||
let sprintf = Printf.sprintf
|
||||
let print = print_endline
|
||||
|
||||
let abort msg =
|
||||
Utils.highlight (sprintf "Command-line error: %s\n" msg); exit 1
|
||||
(* Printing a string in red to standard error *)
|
||||
|
||||
(** {1 Help} *)
|
||||
let highlight msg = Printf.eprintf "\027[31m%s\027[0m%!" msg
|
||||
|
||||
let abort msg =
|
||||
highlight (sprintf "Command-line error: %s\n" msg); exit 1
|
||||
|
||||
(* Help *)
|
||||
|
||||
let help language extension () =
|
||||
let file = Filename.basename Sys.argv.(0) in
|
||||
@ -55,16 +72,16 @@ let help language extension () =
|
||||
print " --bytes Bytes for source locations";
|
||||
print " --mono Use Menhir monolithic API";
|
||||
print " --expr Parse an expression";
|
||||
print " --verbose=<stages> cli, cpp, ast-tokens, ast (colon-separated)";
|
||||
print " --verbose=<stages> cli, preproc, ast-tokens, ast (colon-separated)";
|
||||
print " --version Commit hash on stdout";
|
||||
print " -h, --help This help";
|
||||
exit 0
|
||||
|
||||
(** {1 Version} *)
|
||||
(* Version *)
|
||||
|
||||
let version () = printf "%s\n" Version.version; exit 0
|
||||
|
||||
(** {1 Specifying the command-line options a la GNU} *)
|
||||
(* Specifying the command-line options a la GNU *)
|
||||
|
||||
let copy = ref false
|
||||
and tokens = ref false
|
||||
@ -72,7 +89,7 @@ and units = ref false
|
||||
and quiet = ref false
|
||||
and columns = ref false
|
||||
and bytes = ref false
|
||||
and verbose = ref Utils.String.Set.empty
|
||||
and verbose = ref SSet.empty
|
||||
and input = ref None
|
||||
and libs = ref []
|
||||
and verb_str = ref ""
|
||||
@ -84,11 +101,12 @@ let split_at_colon = Str.(split (regexp ":"))
|
||||
let add_path p = libs := !libs @ split_at_colon p
|
||||
|
||||
let add_verbose d =
|
||||
verbose := List.fold_left (Utils.swap Utils.String.Set.add)
|
||||
verbose := List.fold_left (fun x y -> SSet.add y x)
|
||||
!verbose
|
||||
(split_at_colon d)
|
||||
|
||||
let specs language extension =
|
||||
let language = lang_to_string language in
|
||||
let open! Getopt in [
|
||||
'I', nolong, None, Some add_path;
|
||||
'c', "copy", set copy true, None;
|
||||
@ -105,17 +123,15 @@ let specs language extension =
|
||||
]
|
||||
;;
|
||||
|
||||
(** Handler of anonymous arguments
|
||||
*)
|
||||
(* Handler of anonymous arguments *)
|
||||
|
||||
let anonymous arg =
|
||||
match !input with
|
||||
None -> input := Some arg
|
||||
| Some s -> Printf.printf "s=%s\n" s;
|
||||
abort (sprintf "Multiple inputs")
|
||||
;;
|
||||
| Some _ -> abort (sprintf "Multiple inputs")
|
||||
|
||||
(* Checking options and exporting them as non-mutable values *)
|
||||
|
||||
(** Checking options and exporting them as non-mutable values
|
||||
*)
|
||||
let string_of convert = function
|
||||
None -> "None"
|
||||
| Some s -> sprintf "Some %s" (convert s)
|
||||
@ -139,21 +155,20 @@ let print_opt () =
|
||||
printf "verbose = %s\n" !verb_str;
|
||||
printf "input = %s\n" (string_of quote !input);
|
||||
printf "libs = %s\n" (string_of_path !libs)
|
||||
;;
|
||||
|
||||
let check extension =
|
||||
let check lang ext =
|
||||
let () =
|
||||
if Utils.String.Set.mem "cli" !verbose then print_opt () in
|
||||
if SSet.mem "cli" !verbose then print_opt () in
|
||||
|
||||
let input =
|
||||
match !input with
|
||||
None | Some "-" -> !input
|
||||
None | Some "-" -> None
|
||||
| Some file_path ->
|
||||
if Filename.check_suffix file_path extension
|
||||
if Filename.check_suffix file_path ext
|
||||
then if Sys.file_exists file_path
|
||||
then Some file_path
|
||||
else abort "Source file not found."
|
||||
else abort ("Source file lacks the extension " ^ extension ^ ".") in
|
||||
else abort ("Source file lacks the extension " ^ ext ^ ".") in
|
||||
|
||||
(* Exporting remaining options as non-mutable values *)
|
||||
|
||||
@ -169,7 +184,7 @@ let check extension =
|
||||
and libs = !libs in
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cli" verbose then
|
||||
if SSet.mem "cli" verbose then
|
||||
begin
|
||||
printf "\nEXPORTED COMMAND LINE\n";
|
||||
printf "copy = %b\n" copy;
|
||||
@ -194,16 +209,16 @@ let check extension =
|
||||
| false, false, false, true -> Tokens
|
||||
| _ -> abort "Choose one of -q, -c, -u, -t."
|
||||
|
||||
in make ~input ~libs ~verbose ~offsets ~mode ~cmd ~mono ~expr
|
||||
in make ~input ~libs ~verbose ~offsets ~mode ~cmd ~mono ~expr ~lang ~ext
|
||||
|
||||
(** {1 Parsing the command-line options} *)
|
||||
(* Parsing the command-line options *)
|
||||
|
||||
let read language extension =
|
||||
let read ~lang ~ext =
|
||||
try
|
||||
Getopt.parse_cmdline (specs language extension) anonymous;
|
||||
Getopt.parse_cmdline (specs lang ext) anonymous;
|
||||
(verb_str :=
|
||||
let apply e a =
|
||||
if a = "" then e else Printf.sprintf "%s, %s" e a
|
||||
in Utils.String.Set.fold apply !verbose "");
|
||||
check extension
|
||||
in SSet.fold apply !verbose "");
|
||||
check lang ext
|
||||
with Getopt.Error msg -> abort msg
|
||||
|
@ -48,11 +48,20 @@ type command = Quiet | Copy | Units | Tokens
|
||||
expressions is used, otherwise a full-fledged contract is
|
||||
expected.}
|
||||
} *)
|
||||
|
||||
type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
|
||||
|
||||
val lang_to_string : language -> string
|
||||
|
||||
module SSet : Set.S with type elt = string and type t = Set.Make(String).t
|
||||
|
||||
type options = <
|
||||
input : string option;
|
||||
libs : string list;
|
||||
verbose : Utils.String.Set.t;
|
||||
verbose : SSet.t;
|
||||
offsets : bool;
|
||||
lang : language;
|
||||
ext : string; (* ".ligo", ".mligo", ".religo" *)
|
||||
mode : [`Byte | `Point];
|
||||
cmd : command;
|
||||
mono : bool;
|
||||
@ -62,8 +71,10 @@ type options = <
|
||||
val make :
|
||||
input:string option ->
|
||||
libs:string list ->
|
||||
verbose:Utils.String.Set.t ->
|
||||
verbose:SSet.t ->
|
||||
offsets:bool ->
|
||||
lang:language ->
|
||||
ext:string ->
|
||||
mode:[`Byte | `Point] ->
|
||||
cmd:command ->
|
||||
mono:bool ->
|
||||
@ -71,7 +82,7 @@ val make :
|
||||
options
|
||||
|
||||
(** Parsing the command-line options on stdin. The first parameter is
|
||||
the name of the concrete syntax, e.g., "pascaligo", and the second
|
||||
is the file extension, e.g., ".ligo".
|
||||
*)
|
||||
val read : string -> string -> options
|
||||
the name of the concrete syntax, e.g., [PascaLIGO], and the second
|
||||
is the expected file extension, e.g., ".ligo". *)
|
||||
|
||||
val read : lang:language -> ext:string -> options
|
||||
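(* Illustrative sketch, not part of the diff: with the labelled
   interface, a concrete-syntax driver selects its options in one line,
   exactly as the ReasonLIGO IO modules do earlier in this diff. *)
let _reasonligo_options = EvalOpt.(read ~lang:`ReasonLIGO ~ext:".religo")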
|
@ -135,7 +135,14 @@ module type S =

val slide : token -> window -> window

type input =
File of file_path
| String of string
| Channel of in_channel
| Buffer of Lexing.lexbuf

type instance = {
input : input;
read : log:logger -> Lexing.lexbuf -> token;
buffer : Lexing.lexbuf;
get_win : unit -> window;
@ -145,16 +152,15 @@ module type S =
close : unit -> unit
}

type input =
File of file_path (* "-" means stdin *)
| Stdin
| String of string
| Channel of in_channel
| Buffer of Lexing.lexbuf

type open_err = File_opening of string

val open_token_stream : input -> (instance, open_err) Stdlib.result
val lexbuf_from_input :
input -> (Lexing.lexbuf * (unit -> unit), open_err) Stdlib.result

type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]

val open_token_stream :
language -> input -> (instance, open_err) Stdlib.result
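(* Illustrative sketch, not part of the diff: given an instantiated
   lexer, e.g. [module MyLexer = Lexer.Make (LexToken)] (an assumed
   name), the concrete syntax is now chosen at the call site and the
   stream is drained much as LexerUnit does later in this diff. Real
   callers also catch [MyLexer.Error]; that is omitted here. *)
let _drain_reasonligo (source : string) =
  match MyLexer.open_token_stream `ReasonLIGO (MyLexer.String source) with
    Ok MyLexer.{read; buffer; close; _} ->
      let rec loop acc =
        let token = read ~log:(fun _ _ -> ()) buffer in
        if MyLexer.Token.is_eof token then List.rev acc
        else loop (token :: acc) in
      let tokens = loop [] in
      close (); Stdlib.Ok tokens
  | Error (MyLexer.File_opening msg) -> Stdlib.Error msg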
|
||||
(* Error reporting *)
|
||||
|
||||
|
@ -157,7 +157,14 @@ module type S =
|
||||
|
||||
val slide : token -> window -> window
|
||||
|
||||
type input =
|
||||
File of file_path
|
||||
| String of string
|
||||
| Channel of in_channel
|
||||
| Buffer of Lexing.lexbuf
|
||||
|
||||
type instance = {
|
||||
input : input;
|
||||
read : log:logger -> Lexing.lexbuf -> token;
|
||||
buffer : Lexing.lexbuf;
|
||||
get_win : unit -> window;
|
||||
@ -167,16 +174,15 @@ module type S =
|
||||
close : unit -> unit
|
||||
}
|
||||
|
||||
type input =
|
||||
File of file_path (* "-" means stdin *)
|
||||
| Stdin
|
||||
| String of string
|
||||
| Channel of in_channel
|
||||
| Buffer of Lexing.lexbuf
|
||||
|
||||
type open_err = File_opening of string
|
||||
|
||||
val open_token_stream : input -> (instance, open_err) Stdlib.result
|
||||
val lexbuf_from_input :
|
||||
input -> (Lexing.lexbuf * (unit -> unit), open_err) Stdlib.result
|
||||
|
||||
type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
|
||||
|
||||
val open_token_stream :
|
||||
language -> input -> (instance, open_err) Stdlib.result
|
||||
|
||||
(* Error reporting *)
|
||||
|
||||
@ -254,7 +260,7 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
|
||||
Nil -> One token
|
||||
| One t | Two (t,_) -> Two (token,t)
|
||||
|
||||
(** Beyond tokens, the result of lexing is a state. The type
|
||||
(* Beyond tokens, the result of lexing is a state. The type
|
||||
[state] represents the logical state of the lexing engine, that
|
||||
is, a value which is threaded during scanning and which denotes
|
||||
useful, high-level information beyond what the type
|
||||
@ -292,6 +298,9 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
|
||||
it to [decoder]. See the documentation of the third-party
|
||||
library Uutf.
|
||||
*)
|
||||
|
||||
type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
|
||||
|
||||
type state = {
|
||||
units : (Markup.t list * token) FQueue.t;
|
||||
markup : Markup.t list;
|
||||
@ -299,7 +308,8 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
|
||||
last : Region.t;
|
||||
pos : Pos.t;
|
||||
decoder : Uutf.decoder;
|
||||
supply : Bytes.t -> int -> int -> unit
|
||||
supply : Bytes.t -> int -> int -> unit;
|
||||
lang : language
|
||||
}
|
||||
|
||||
(* The call [enqueue (token, state)] updates functionally the
|
||||
@ -388,7 +398,7 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
|
||||
| Unterminated_string
|
||||
| Unterminated_integer
|
||||
| Odd_lengthed_bytes
|
||||
| Unterminated_comment
|
||||
| Unterminated_comment of string
|
||||
| Orphan_minus
|
||||
| Non_canonical_zero
|
||||
| Negative_byte_sequence
|
||||
@ -401,51 +411,51 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
|
||||
|
||||
let error_to_string = function
|
||||
Invalid_utf8_sequence ->
|
||||
"Invalid UTF-8 sequence.\n"
|
||||
"Invalid UTF-8 sequence."
|
||||
| Unexpected_character c ->
|
||||
sprintf "Unexpected character '%s'.\n" (Char.escaped c)
|
||||
sprintf "Unexpected character '%s'." (Char.escaped c)
|
||||
| Undefined_escape_sequence ->
|
||||
"Undefined escape sequence.\n\
|
||||
Hint: Remove or replace the sequence.\n"
|
||||
Hint: Remove or replace the sequence."
|
||||
| Missing_break ->
|
||||
"Missing break.\n\
|
||||
Hint: Insert some space.\n"
|
||||
Hint: Insert some space."
|
||||
| Unterminated_string ->
|
||||
"Unterminated string.\n\
|
||||
Hint: Close with double quotes.\n"
|
||||
Hint: Close with double quotes."
|
||||
| Unterminated_integer ->
|
||||
"Unterminated integer.\n\
|
||||
Hint: Remove the sign or proceed with a natural number.\n"
|
||||
Hint: Remove the sign or proceed with a natural number."
|
||||
| Odd_lengthed_bytes ->
|
||||
"The length of the byte sequence is an odd number.\n\
|
||||
Hint: Add or remove a digit.\n"
|
||||
| Unterminated_comment ->
|
||||
"Unterminated comment.\n\
|
||||
Hint: Close with \"*)\".\n"
|
||||
Hint: Add or remove a digit."
|
||||
| Unterminated_comment ending ->
|
||||
sprintf "Unterminated comment.\n\
|
||||
Hint: Close with \"%s\"." ending
|
||||
| Orphan_minus ->
|
||||
"Orphan minus sign.\n\
|
||||
Hint: Remove the trailing space.\n"
|
||||
Hint: Remove the trailing space."
|
||||
| Non_canonical_zero ->
|
||||
"Non-canonical zero.\n\
|
||||
Hint: Use 0.\n"
|
||||
Hint: Use 0."
|
||||
| Negative_byte_sequence ->
|
||||
"Negative byte sequence.\n\
|
||||
Hint: Remove the leading minus sign.\n"
|
||||
Hint: Remove the leading minus sign."
|
||||
| Broken_string ->
|
||||
"The string starting here is interrupted by a line break.\n\
|
||||
Hint: Remove the break, close the string before or insert a \
|
||||
backslash.\n"
|
||||
backslash."
|
||||
| Invalid_character_in_string ->
|
||||
"Invalid character in string.\n\
|
||||
Hint: Remove or replace the character.\n"
|
||||
Hint: Remove or replace the character."
|
||||
| Reserved_name s ->
|
||||
sprintf "Reserved name: \"%s\".\n\
|
||||
Hint: Change the name.\n" s
|
||||
Hint: Change the name." s
|
||||
| Invalid_symbol ->
|
||||
"Invalid symbol.\n\
|
||||
Hint: Check the LIGO syntax you use.\n"
|
||||
Hint: Check the LIGO syntax you use."
|
||||
| Invalid_natural ->
|
||||
"Invalid natural."
|
||||
"Invalid natural number."
|
||||
| Invalid_attribute ->
|
||||
"Invalid attribute."
|
||||
|
||||
@ -454,7 +464,7 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
|
||||
let format_error ?(offsets=true) mode Region.{region; value} ~file =
|
||||
let msg = error_to_string value
|
||||
and reg = region#to_string ~file ~offsets mode in
|
||||
let value = sprintf "Lexical error %s:\n%s" reg msg
|
||||
let value = sprintf "Lexical error %s:\n%s\n" reg msg
|
||||
in Region.{value; region}
|
||||
|
||||
let fail region value = raise (Error Region.{region; value})
|
||||
@ -618,16 +628,16 @@ rule init state = parse
|
||||
and scan state = parse
|
||||
nl { scan (push_newline state lexbuf) lexbuf }
|
||||
| ' '+ { scan (push_space state lexbuf) lexbuf }
|
||||
| '\t'+ { scan (push_tabs state lexbuf) lexbuf }
|
||||
| '\t'+ { scan (push_tabs state lexbuf) lexbuf }
|
||||
| ident { mk_ident state lexbuf |> enqueue }
|
||||
| constr { mk_constr state lexbuf |> enqueue }
|
||||
| bytes { mk_bytes seq state lexbuf |> enqueue }
|
||||
| natural 'n' { mk_nat state lexbuf |> enqueue }
|
||||
| natural "mutez" { mk_mutez state lexbuf |> enqueue }
|
||||
| natural "tz"
|
||||
| natural "tez" { mk_tez state lexbuf |> enqueue }
|
||||
| natural "tez" { mk_tez state lexbuf |> enqueue }
|
||||
| decimal "tz"
|
||||
| decimal "tez" { mk_tez_decimal state lexbuf |> enqueue }
|
||||
| decimal "tez" { mk_tez_decimal state lexbuf |> enqueue }
|
||||
| natural { mk_int state lexbuf |> enqueue }
|
||||
| symbol { mk_sym state lexbuf |> enqueue }
|
||||
| eof { mk_eof state lexbuf |> enqueue }
|
||||
@ -638,31 +648,43 @@ and scan state = parse
|
||||
let thread = {opening; len=1; acc=['"']} in
|
||||
scan_string thread state lexbuf |> mk_string |> enqueue }
|
||||
|
||||
| "(*" { let opening, _, state = sync state lexbuf in
|
||||
let thread = {opening; len=2; acc=['*';'(']} in
|
||||
let state = scan_block thread state lexbuf |> push_block
|
||||
in scan state lexbuf }
|
||||
| "(*" { if state.lang = `PascaLIGO || state.lang = `CameLIGO then
|
||||
let opening, _, state = sync state lexbuf in
|
||||
let thread = {opening; len=2; acc=['*';'(']} in
|
||||
let state = scan_pascaligo_block thread state lexbuf |> push_block
|
||||
in scan state lexbuf
|
||||
else (rollback lexbuf; scan_two_sym state lexbuf)
|
||||
}
|
||||
|
||||
| "/*" { if state.lang = `ReasonLIGO then
|
||||
let opening, _, state = sync state lexbuf in
|
||||
let thread = {opening; len=2; acc=['*';'/']} in
|
||||
let state = scan_reasonligo_block thread state lexbuf |> push_block
|
||||
in scan state lexbuf
|
||||
else (rollback lexbuf; scan_two_sym state lexbuf)
|
||||
}
|
||||
|
||||
| "//" { let opening, _, state = sync state lexbuf in
|
||||
let thread = {opening; len=2; acc=['/';'/']} in
|
||||
let state = scan_line thread state lexbuf |> push_line
|
||||
in scan state lexbuf }
|
||||
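(* Illustrative note, not part of the diff: the dispatch above keys the
   block-comment syntax on [state.lang]; the same mapping also determines
   the closing lexeme reported by [Unterminated_comment]. Restated as a
   plain function with a made-up name: *)
let _block_comment_delimiters = function
  `PascaLIGO | `CameLIGO -> ("(*", "*)")
| `ReasonLIGO -> ("/*", "*/")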
|
||||
(* Management of #include CPP directives
|
||||
(* Management of #include preprocessing directives
|
||||
|
||||
An input LIGO program may contain GNU CPP (C preprocessor)
|
||||
directives, and the entry modules (named *Main.ml) run CPP on them
|
||||
in traditional mode:
|
||||
An input LIGO program may contain preprocessing directives, and
|
||||
the entry modules (named *Main.ml) run the preprocessor on them,
|
||||
as if using the GNU C preprocessor in traditional mode:
|
||||
|
||||
https://gcc.gnu.org/onlinedocs/cpp/Traditional-Mode.html
|
||||
|
||||
The main interest in using CPP is that it can stand for a poor
|
||||
man's (flat) module system for LIGO thanks to #include
|
||||
directives, and the traditional mode leaves the markup mostly
|
||||
undisturbed.
|
||||
The main interest in using a preprocessor is that it can stand
|
||||
for a poor man's (flat) module system for LIGO thanks to #include
|
||||
directives, and the equivalent of the traditional mode leaves the
|
||||
markup undisturbed.
|
||||
|
||||
Some of the #line resulting from processing #include directives
|
||||
deal with system file headers and thus have to be ignored for our
|
||||
Contrary to the C preprocessor, our preprocessor does not
|
||||
generate #line resulting from processing #include directives deal
|
||||
with system file headers and thus have to be ignored for our
|
||||
purpose. Moreover, these #line directives may also carry some
|
||||
additional flags:
|
||||
|
||||
@ -671,7 +693,7 @@ and scan state = parse
|
||||
of which 1 and 2 indicate, respectively, the start of a new file
|
||||
and the return from a file (after its inclusion has been
|
||||
processed).
|
||||
*)
|
||||
*)
|
||||
|
||||
| '#' blank* ("line" blank+)? (natural as line) blank+
|
||||
'"' (string as file) '"' {
|
||||
@ -714,6 +736,14 @@ and scan state = parse
|
||||
| _ as c { let region, _, _ = sync state lexbuf
|
||||
in fail region (Unexpected_character c) }
|
||||
|
||||
(* Scanning two symbols *)
|
||||
|
||||
and scan_two_sym state = parse
|
||||
symbol { scan_one_sym (mk_sym state lexbuf |> enqueue) lexbuf }
|
||||
|
||||
and scan_one_sym state = parse
|
||||
symbol { scan (mk_sym state lexbuf |> enqueue) lexbuf }
|
||||
|
||||
(* Scanning CPP #include flags *)
|
||||
|
||||
and scan_flags state acc = parse
|
||||
@ -745,39 +775,70 @@ and scan_string thread state = parse
|
||||
|
||||
(* Finishing a block comment
|
||||
|
||||
(Note for Emacs: ("(*")
|
||||
The lexing of block comments must take care of embedded block
|
||||
comments that may occur within, as well as strings, so no substring
|
||||
"*)" may inadvertently close the block. This is the purpose
|
||||
of the first case of the scanner [scan_block].
|
||||
(For Emacs: ("(*") The lexing of block comments must take care of
|
||||
embedded block comments that may occur within, as well as strings,
|
||||
so no substring "*/" or "*)" may inadvertently close the
|
||||
block. This is the purpose of the first case of the scanners
|
||||
[scan_pascaligo_block] and [scan_reasonligo_block].
|
||||
*)
|
||||
|
||||
and scan_block thread state = parse
|
||||
and scan_pascaligo_block thread state = parse
|
||||
'"' | "(*" { let opening = thread.opening in
|
||||
let opening', lexeme, state = sync state lexbuf in
|
||||
let thread = push_string lexeme thread in
|
||||
let thread = {thread with opening=opening'} in
|
||||
let next = if lexeme = "\"" then scan_string
|
||||
else scan_block in
|
||||
else scan_pascaligo_block in
|
||||
let thread, state = next thread state lexbuf in
|
||||
let thread = {thread with opening}
|
||||
in scan_block thread state lexbuf }
|
||||
in scan_pascaligo_block thread state lexbuf }
|
||||
| "*)" { let _, lexeme, state = sync state lexbuf
|
||||
in push_string lexeme thread, state }
|
||||
| nl as nl { let () = Lexing.new_line lexbuf
|
||||
and state = {state with pos = state.pos#new_line nl}
|
||||
and thread = push_string nl thread
|
||||
in scan_block thread state lexbuf }
|
||||
| eof { fail thread.opening Unterminated_comment }
|
||||
in scan_pascaligo_block thread state lexbuf }
|
||||
| eof { fail thread.opening (Unterminated_comment "*)") }
|
||||
| _ { let () = rollback lexbuf in
|
||||
let len = thread.len in
|
||||
let thread,
|
||||
status = scan_utf8 thread state lexbuf in
|
||||
status = scan_utf8 "*)" thread state lexbuf in
|
||||
let delta = thread.len - len in
|
||||
let pos = state.pos#shift_one_uchar delta in
|
||||
match status with
|
||||
None -> scan_block thread {state with pos} lexbuf
|
||||
| Some error ->
|
||||
Stdlib.Ok () ->
|
||||
scan_pascaligo_block thread {state with pos} lexbuf
|
||||
| Error error ->
|
||||
let region = Region.make ~start:state.pos ~stop:pos
|
||||
in fail region error }
|
||||
|
||||
and scan_reasonligo_block thread state = parse
|
||||
'"' | "/*" { let opening = thread.opening in
|
||||
let opening', lexeme, state = sync state lexbuf in
|
||||
let thread = push_string lexeme thread in
|
||||
let thread = {thread with opening=opening'} in
|
||||
let next = if lexeme = "\"" then scan_string
|
||||
else scan_reasonligo_block in
|
||||
let thread, state = next thread state lexbuf in
|
||||
let thread = {thread with opening}
|
||||
in scan_reasonligo_block thread state lexbuf }
|
||||
| "*/" { let _, lexeme, state = sync state lexbuf
|
||||
in push_string lexeme thread, state }
|
||||
| nl as nl { let () = Lexing.new_line lexbuf
|
||||
and state = {state with pos = state.pos#new_line nl}
|
||||
and thread = push_string nl thread
|
||||
in scan_reasonligo_block thread state lexbuf }
|
||||
| eof { fail thread.opening (Unterminated_comment "*/") }
|
||||
| _ { let () = rollback lexbuf in
|
||||
let len = thread.len in
|
||||
let thread,
|
||||
status = scan_utf8 "*/" thread state lexbuf in
|
||||
let delta = thread.len - len in
|
||||
let pos = state.pos#shift_one_uchar delta in
|
||||
match status with
|
||||
Stdlib.Ok () ->
|
||||
scan_reasonligo_block thread {state with pos} lexbuf
|
||||
| Error error ->
|
||||
let region = Region.make ~start:state.pos ~stop:pos
|
||||
in fail region error }
|
||||
|
||||
@ -792,24 +853,36 @@ and scan_line thread state = parse
|
||||
| _ { let () = rollback lexbuf in
|
||||
let len = thread.len in
|
||||
let thread,
|
||||
status = scan_utf8 thread state lexbuf in
|
||||
status = scan_utf8_inline thread state lexbuf in
|
||||
let delta = thread.len - len in
|
||||
let pos = state.pos#shift_one_uchar delta in
|
||||
match status with
|
||||
None -> scan_line thread {state with pos} lexbuf
|
||||
| Some error ->
|
||||
Stdlib.Ok () ->
|
||||
scan_line thread {state with pos} lexbuf
|
||||
| Error error ->
|
||||
let region = Region.make ~start:state.pos ~stop:pos
|
||||
in fail region error }
|
||||
|
||||
and scan_utf8 thread state = parse
|
||||
eof { fail thread.opening Unterminated_comment }
|
||||
and scan_utf8 closing thread state = parse
|
||||
eof { fail thread.opening (Unterminated_comment closing) }
|
||||
| _ as c { let thread = push_char c thread in
|
||||
let lexeme = Lexing.lexeme lexbuf in
|
||||
let () = state.supply (Bytes.of_string lexeme) 0 1 in
|
||||
match Uutf.decode state.decoder with
|
||||
`Uchar _ -> thread, None
|
||||
| `Malformed _ -> thread, Some Invalid_utf8_sequence
|
||||
| `Await -> scan_utf8 thread state lexbuf
|
||||
`Uchar _ -> thread, Stdlib.Ok ()
|
||||
| `Malformed _ -> thread, Stdlib.Error Invalid_utf8_sequence
|
||||
| `Await -> scan_utf8 closing thread state lexbuf
|
||||
| `End -> assert false }
|
||||
|
||||
and scan_utf8_inline thread state = parse
|
||||
eof { thread, Stdlib.Ok () }
|
||||
| _ as c { let thread = push_char c thread in
|
||||
let lexeme = Lexing.lexeme lexbuf in
|
||||
let () = state.supply (Bytes.of_string lexeme) 0 1 in
|
||||
match Uutf.decode state.decoder with
|
||||
`Uchar _ -> thread, Stdlib.Ok ()
|
||||
| `Malformed _ -> thread, Stdlib.Error Invalid_utf8_sequence
|
||||
| `Await -> scan_utf8_inline thread state lexbuf
|
||||
| `End -> assert false }
|
||||
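(* Illustrative note, not part of the diff: [scan_utf8] and
   [scan_utf8_inline] now report through Stdlib.result rather than
   option, so their callers dispatch as sketched here, with made-up
   names for the two continuations: *)
let _on_utf8_status ~resume ~fail_with = function
  Stdlib.Ok () -> resume ()
| Stdlib.Error err -> fail_with err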
|
||||
(* END LEXER DEFINITION *)
|
||||
@ -863,7 +936,14 @@ and scan_utf8 thread state = parse
|
||||
|
||||
type logger = Markup.t list -> token -> unit
|
||||
|
||||
type input =
|
||||
File of file_path
|
||||
| String of string
|
||||
| Channel of in_channel
|
||||
| Buffer of Lexing.lexbuf
|
||||
|
||||
type instance = {
|
||||
input : input;
|
||||
read : log:logger -> Lexing.lexbuf -> token;
|
||||
buffer : Lexing.lexbuf;
|
||||
get_win : unit -> window;
|
||||
@ -873,19 +953,29 @@ type instance = {
|
||||
close : unit -> unit
|
||||
}
|
||||
|
||||
type input =
|
||||
File of file_path (* "-" means stdin *)
|
||||
| Stdin
|
||||
| String of string
|
||||
| Channel of in_channel
|
||||
| Buffer of Lexing.lexbuf
|
||||
|
||||
type open_err = File_opening of string
|
||||
|
||||
let open_token_stream input =
|
||||
let lexbuf_from_input = function
|
||||
File path ->
|
||||
(try
|
||||
let chan = open_in path in
|
||||
let close () = close_in chan in
|
||||
let lexbuf = Lexing.from_channel chan in
|
||||
let () =
|
||||
let open Lexing in
|
||||
lexbuf.lex_curr_p <- {lexbuf.lex_curr_p with pos_fname = path}
|
||||
in Ok (lexbuf, close)
|
||||
with Sys_error msg -> Stdlib.Error (File_opening msg))
|
||||
| String s ->
|
||||
Ok (Lexing.from_string s, fun () -> ())
|
||||
| Channel chan ->
|
||||
let close () = close_in chan in
|
||||
Ok (Lexing.from_channel chan, close)
|
||||
| Buffer b -> Ok (b, fun () -> ())
|
||||
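(* Illustrative sketch, not part of the diff: exposing
   [lexbuf_from_input] lets callers run the preprocessor before lexing,
   as ParserUnit.gen_parser does later in this diff. A minimal caller,
   with a made-up name: *)
let _with_lexbuf input k =
  match lexbuf_from_input input with
    Ok (lexbuf, close) -> let result = k lexbuf in close (); Some result
  | Error (File_opening msg) -> prerr_endline msg; None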
|
||||
let open_token_stream (lang: language) input =
|
||||
let file_path = match input with
|
||||
File file_path ->
|
||||
if file_path = "-" then "" else file_path
|
||||
File path -> path
|
||||
| _ -> "" in
|
||||
let pos = Pos.min ~file:file_path in
|
||||
let buf_reg = ref (pos#byte, pos#byte)
|
||||
@ -898,7 +988,8 @@ let open_token_stream input =
|
||||
pos;
|
||||
markup = [];
|
||||
decoder;
|
||||
supply} in
|
||||
supply;
|
||||
lang} in
|
||||
|
||||
let get_pos () = !state.pos
|
||||
and get_last () = !state.last
|
||||
@ -966,32 +1057,14 @@ let open_token_stream input =
|
||||
check_right_context token buffer;
|
||||
patch_buffer (Token.to_region token)#byte_pos buffer;
|
||||
token in
|
||||
|
||||
let buf_close_res =
|
||||
match input with
|
||||
File "" | File "-" | Stdin ->
|
||||
Ok (Lexing.from_channel stdin, fun () -> close_in stdin)
|
||||
| File path ->
|
||||
(try
|
||||
let chan = open_in path in
|
||||
let close () = close_in chan in
|
||||
Ok (Lexing.from_channel chan, close)
|
||||
with
|
||||
Sys_error msg -> Stdlib.Error (File_opening msg))
|
||||
| String s ->
|
||||
Ok (Lexing.from_string s, fun () -> ())
|
||||
| Channel chan ->
|
||||
let close () = close_in chan in
|
||||
Ok (Lexing.from_channel chan, close)
|
||||
| Buffer b -> Ok (b, fun () -> ()) in
|
||||
match buf_close_res with
|
||||
match lexbuf_from_input input with
|
||||
Ok (buffer, close) ->
|
||||
let () =
|
||||
match input with
|
||||
File path when path <> "" -> reset ~file:path buffer
|
||||
| _ -> () in
|
||||
let instance = {
|
||||
read; buffer; get_win; get_pos; get_last; get_file; close}
|
||||
input; read; buffer; get_win; get_pos; get_last; get_file; close}
|
||||
in Ok instance
|
||||
| Error _ as e -> e
|
||||
|
||||
|
@ -7,15 +7,22 @@ module type S =
|
||||
module Lexer : Lexer.S
|
||||
|
||||
val output_token :
|
||||
?offsets:bool -> [`Byte | `Point] ->
|
||||
EvalOpt.command -> out_channel ->
|
||||
Markup.t list -> Lexer.token -> unit
|
||||
?offsets:bool ->
|
||||
[`Byte | `Point] ->
|
||||
EvalOpt.command ->
|
||||
out_channel ->
|
||||
Markup.t list ->
|
||||
Lexer.token ->
|
||||
unit
|
||||
|
||||
type file_path = string
|
||||
|
||||
val trace :
|
||||
?offsets:bool -> [`Byte | `Point] ->
|
||||
file_path option -> EvalOpt.command ->
|
||||
?offsets:bool ->
|
||||
[`Byte | `Point] ->
|
||||
EvalOpt.language ->
|
||||
Lexer.input ->
|
||||
EvalOpt.command ->
|
||||
(unit, string Region.reg) Stdlib.result
|
||||
end
|
||||
|
||||
@ -49,16 +56,12 @@ module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
|
||||
|
||||
type file_path = string
|
||||
|
||||
let trace ?(offsets=true) mode file_path_opt command :
|
||||
let trace ?(offsets=true) mode lang input command :
|
||||
(unit, string Region.reg) Stdlib.result =
|
||||
let input =
|
||||
match file_path_opt with
|
||||
Some file_path -> Lexer.File file_path
|
||||
| None -> Lexer.Stdin in
|
||||
match Lexer.open_token_stream input with
|
||||
match Lexer.open_token_stream lang input with
|
||||
Ok Lexer.{read; buffer; close; _} ->
|
||||
let log = output_token ~offsets mode command stdout
|
||||
and close_all () = close (); close_out stdout in
|
||||
and close_all () = flush_all (); close () in
|
||||
let rec iter () =
|
||||
match read ~log buffer with
|
||||
token ->
|
||||
@ -66,15 +69,11 @@ module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
|
||||
then Stdlib.Ok ()
|
||||
else iter ()
|
||||
| exception Lexer.Error error ->
|
||||
let file =
|
||||
match file_path_opt with
|
||||
None | Some "-" -> false
|
||||
| Some _ -> true in
|
||||
let msg =
|
||||
Lexer.format_error ~offsets mode ~file error
|
||||
Lexer.format_error ~offsets mode ~file:true error
|
||||
in Stdlib.Error msg in
|
||||
let result = iter ()
|
||||
in close_all (); result
|
||||
| Stdlib.Error (Lexer.File_opening msg) ->
|
||||
close_out stdout; Stdlib.Error (Region.wrap_ghost msg)
|
||||
flush_all (); Stdlib.Error (Region.wrap_ghost msg)
|
||||
end
|
||||
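(* Illustrative sketch, not part of the diff: assuming an instantiated
   log, [module Log = LexerLog.Make (SomeLexer)] with SomeLexer a
   made-up lexer instance, the extra language and input parameters of
   [trace] are now supplied at the call site: *)
let _trace_string lang source =
  Log.trace ~offsets:true `Point lang (SomeLexer.String source) EvalOpt.Tokens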
|
@ -5,15 +5,22 @@ module type S =
|
||||
module Lexer : Lexer.S
|
||||
|
||||
val output_token :
|
||||
?offsets:bool -> [`Byte | `Point] ->
|
||||
EvalOpt.command -> out_channel ->
|
||||
Markup.t list -> Lexer.token -> unit
|
||||
?offsets:bool ->
|
||||
[`Byte | `Point] ->
|
||||
EvalOpt.command ->
|
||||
out_channel ->
|
||||
Markup.t list ->
|
||||
Lexer.token ->
|
||||
unit
|
||||
|
||||
type file_path = string
|
||||
|
||||
val trace :
|
||||
?offsets:bool -> [`Byte | `Point] ->
|
||||
file_path option -> EvalOpt.command ->
|
||||
?offsets:bool ->
|
||||
[`Byte | `Point] ->
|
||||
EvalOpt.language ->
|
||||
Lexer.input ->
|
||||
EvalOpt.command ->
|
||||
(unit, string Region.reg) Stdlib.result
|
||||
end
|
||||
|
||||
|
@ -1,110 +1,112 @@
|
||||
(* Functor to build a standalone LIGO lexer *)
|
||||
(* Functor to build a LIGO lexer *)
|
||||
|
||||
module Region = Simple_utils.Region
|
||||
module Preproc = Preprocessor.Preproc
|
||||
module SSet = Set.Make (String)
|
||||
|
||||
module type IO =
|
||||
sig
|
||||
val ext : string (* LIGO file extension *)
|
||||
val options : EvalOpt.options (* CLI options *)
|
||||
end
|
||||
|
||||
module Make (IO: IO) (Lexer: Lexer.S) =
|
||||
struct
|
||||
open Printf
|
||||
module SSet = Utils.String.Set
|
||||
|
||||
(* Error printing and exception tracing *)
|
||||
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
(* Preprocessing the input source and opening the input channels *)
|
||||
|
||||
(* Path for CPP inclusions (#include) *)
|
||||
|
||||
let lib_path =
|
||||
match IO.options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
let prefix =
|
||||
match IO.options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp" ^ IO.ext
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" IO.options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match IO.options#input with
|
||||
None | Some "-" ->
|
||||
sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
| Some file ->
|
||||
sprintf "cpp -traditional-cpp%s %s > %s"
|
||||
lib_path file pp_input
|
||||
|
||||
(* Running the lexer on the input file *)
|
||||
(* Preprocessing and lexing the input source *)
|
||||
|
||||
let scan () : (Lexer.token list, string Region.reg) Stdlib.result =
|
||||
(* Preprocessing the input *)
|
||||
(* Preprocessing the input source *)
|
||||
|
||||
if SSet.mem "cpp" IO.options#verbose
|
||||
then eprintf "%s\n%!" cpp_cmd
|
||||
else ();
|
||||
let preproc cin =
|
||||
let buffer = Lexing.from_channel cin in
|
||||
let open Lexing in
|
||||
let () =
|
||||
match IO.options#input with
|
||||
None -> ()
|
||||
| Some pos_fname ->
|
||||
buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname} in
|
||||
let opt = (IO.options :> Preprocessor.EvalOpt.options) in
|
||||
match Preproc.lex opt buffer with
|
||||
Stdlib.Error (pp_buffer, err) ->
|
||||
if SSet.mem "preproc" IO.options#verbose then
|
||||
Printf.printf "%s\n%!" (Buffer.contents pp_buffer);
|
||||
let formatted =
|
||||
Preproc.format ~offsets:IO.options#offsets ~file:true err
|
||||
in Stdlib.Error formatted
|
||||
| Stdlib.Ok pp_buffer ->
|
||||
(* Running the lexer on the preprocessed input *)
|
||||
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
let msg =
|
||||
sprintf "External error: the command \"%s\" failed." cpp_cmd
|
||||
in Stdlib.Error (Region.wrap_ghost msg)
|
||||
else
|
||||
match Lexer.open_token_stream (Lexer.File pp_input) with
|
||||
Ok Lexer.{read; buffer; close; _} ->
|
||||
let close_all () = close (); close_out stdout in
|
||||
let rec read_tokens tokens =
|
||||
match read ~log:(fun _ _ -> ()) buffer with
|
||||
token ->
|
||||
if Lexer.Token.is_eof token
|
||||
then Stdlib.Ok (List.rev tokens)
|
||||
else read_tokens (token::tokens)
|
||||
| exception Lexer.Error error ->
|
||||
let file =
|
||||
match IO.options#input with
|
||||
None | Some "-" -> false
|
||||
| Some _ -> true in
|
||||
let msg =
|
||||
Lexer.format_error ~offsets:IO.options#offsets
|
||||
IO.options#mode ~file error
|
||||
in Stdlib.Error msg in
|
||||
let result = read_tokens []
|
||||
in close_all (); result
|
||||
| Stdlib.Error (Lexer.File_opening msg) ->
|
||||
close_out stdout; Stdlib.Error (Region.wrap_ghost msg)
|
||||
let source = Lexer.String (Buffer.contents pp_buffer) in
|
||||
match Lexer.open_token_stream IO.options#lang source with
|
||||
Ok Lexer.{read; buffer; close; _} ->
|
||||
let close_all () = flush_all (); close () in
|
||||
let rec read_tokens tokens =
|
||||
match read ~log:(fun _ _ -> ()) buffer with
|
||||
token ->
|
||||
if Lexer.Token.is_eof token
|
||||
then Stdlib.Ok (List.rev tokens)
|
||||
else read_tokens (token::tokens)
|
||||
| exception Lexer.Error error ->
|
||||
let file =
|
||||
match IO.options#input with
|
||||
None | Some "-" -> false
|
||||
| Some _ -> true in
|
||||
let () =
|
||||
Printf.eprintf "[LexerUnit] file = %b\n%!" file in
|
||||
let msg =
|
||||
Lexer.format_error ~offsets:IO.options#offsets
|
||||
IO.options#mode ~file error
|
||||
in Stdlib.Error msg in
|
||||
let result = read_tokens []
|
||||
in close_all (); result
|
||||
| Stdlib.Error (Lexer.File_opening msg) ->
|
||||
flush_all (); Stdlib.Error (Region.wrap_ghost msg) in
|
||||
match IO.options#input with
|
||||
None -> preproc stdin
|
||||
| Some file_path ->
|
||||
try open_in file_path |> preproc with
|
||||
Sys_error msg -> Stdlib.Error (Region.wrap_ghost msg)
|
||||
|
||||
(* Tracing the lexing (effectful) *)
|
||||
(* Tracing the lexing *)
|
||||
|
||||
module Log = LexerLog.Make (Lexer)
|
||||
|
||||
let trace () : (unit, string Region.reg) Stdlib.result =
|
||||
(* Preprocessing the input *)
|
||||
|
||||
if SSet.mem "cpp" IO.options#verbose
|
||||
then eprintf "%s\n%!" cpp_cmd
|
||||
else ();
|
||||
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
let msg =
|
||||
sprintf "External error: the command \"%s\" failed." cpp_cmd
|
||||
in Stdlib.Error (Region.wrap_ghost msg)
|
||||
else
|
||||
Log.trace ~offsets:IO.options#offsets
|
||||
IO.options#mode
|
||||
(Some pp_input)
|
||||
IO.options#cmd
|
||||
|
||||
let preproc cin =
|
||||
let buffer = Lexing.from_channel cin in
|
||||
let open Lexing in
|
||||
let () =
|
||||
match IO.options#input with
|
||||
None | Some "-" -> ()
|
||||
| Some pos_fname ->
|
||||
buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname} in
|
||||
let opt = (IO.options :> Preprocessor.EvalOpt.options) in
|
||||
match Preproc.lex opt buffer with
|
||||
Stdlib.Error (pp_buffer, err) ->
|
||||
if SSet.mem "preproc" IO.options#verbose then
|
||||
Printf.printf "%s\n%!" (Buffer.contents pp_buffer);
|
||||
let formatted =
|
||||
Preproc.format ~offsets:IO.options#offsets ~file:true err
|
||||
in Stdlib.Error formatted
|
||||
| Stdlib.Ok pp_buffer ->
|
||||
let preproc_str = Buffer.contents pp_buffer in
|
||||
if SSet.mem "preproc" IO.options#verbose then
|
||||
begin
|
||||
Printf.printf "%s\n%!" (Buffer.contents pp_buffer);
|
||||
Stdlib.Ok ()
|
||||
end
|
||||
else Log.trace ~offsets:IO.options#offsets
|
||||
IO.options#mode
|
||||
IO.options#lang
|
||||
(Lexer.String preproc_str)
|
||||
IO.options#cmd
|
||||
in match IO.options#input with
|
||||
None -> preproc stdin
|
||||
| Some file_path ->
|
||||
try open_in file_path |> preproc with
|
||||
Sys_error msg -> Stdlib.Error (Region.wrap_ghost msg)
|
||||
end
|
||||
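(* Illustrative sketch, not part of the diff: a driver built on this
   functor, with IO and LexToken as in the ReasonLIGO LexerMain earlier
   in the diff, just runs one of the two entry points and reports
   errors in red like the rest of the code base: *)
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))

let () =
  match M.trace () with
    Stdlib.Ok () -> ()
  | Error Region.{value; _} -> Printf.eprintf "\027[31m%s\027[0m%!" value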
|
@ -4,7 +4,6 @@ module Region = Simple_utils.Region
|
||||
|
||||
module type IO =
|
||||
sig
|
||||
val ext : string (* LIGO file extension *)
|
||||
val options : EvalOpt.options (* CLI options *)
|
||||
end
|
||||
|
||||
|
@ -2,10 +2,15 @@
|
||||
|
||||
module Region = Simple_utils.Region
|
||||
|
||||
type options = <
|
||||
offsets : bool;
|
||||
mode : [`Byte | `Point];
|
||||
cmd : EvalOpt.command
|
||||
>
|
||||
|
||||
module type IO =
|
||||
sig
|
||||
val ext : string (* LIGO file extension *)
|
||||
val options : EvalOpt.options (* CLI options *)
|
||||
val options : options
|
||||
end
|
||||
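(* Illustrative note, not part of the diff: [options] above is a
   structural object type, so any richer options object can be passed
   with an explicit width coercion, which is exactly what ParserUnit
   does further down with (SubIO.options :> ParserAPI.options). *)
let _narrow (o : EvalOpt.options) : options = (o :> options)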
|
||||
module type PARSER =
|
||||
@ -50,7 +55,7 @@ module type PARSER =
|
||||
|
||||
(* Main functor *)
|
||||
|
||||
module Make (IO : IO)
|
||||
module Make (IO: IO)
|
||||
(Lexer: Lexer.S)
|
||||
(Parser: PARSER with type token = Lexer.Token.token)
|
||||
(ParErr: sig val message : int -> string end) =
|
||||
@ -95,14 +100,15 @@ module Make (IO : IO)
|
||||
None ->
|
||||
if Lexer.Token.is_eof invalid then ""
|
||||
else let invalid_lexeme = Lexer.Token.to_lexeme invalid in
|
||||
Printf.sprintf ", before \"%s\"" invalid_lexeme
|
||||
Printf.sprintf ", at \"%s\"" invalid_lexeme
|
||||
| Some valid ->
|
||||
let valid_lexeme = Lexer.Token.to_lexeme valid in
|
||||
let s = Printf.sprintf ", after \"%s\"" valid_lexeme in
|
||||
if Lexer.Token.is_eof invalid then s
|
||||
if Lexer.Token.is_eof invalid then
|
||||
Printf.sprintf ", after \"%s\"" valid_lexeme
|
||||
else
|
||||
let invalid_lexeme = Lexer.Token.to_lexeme invalid in
|
||||
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
|
||||
Printf.sprintf " at \"%s\", after \"%s\""
|
||||
invalid_lexeme valid_lexeme in
|
||||
let header = header ^ trailer in
|
||||
let msg =
|
||||
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
|
||||
@ -110,9 +116,9 @@ module Make (IO : IO)
|
||||
|
||||
let failure get_win checkpoint =
|
||||
let message = ParErr.message (state checkpoint) in
|
||||
let message = if message = "<YOUR SYNTAX ERROR MESSAGE HERE>\n" then
|
||||
let message = if message = "<YOUR SYNTAX ERROR MESSAGE HERE>\n" then
|
||||
(string_of_int (state checkpoint)) ^ ": <syntax error>"
|
||||
else
|
||||
else
|
||||
message
|
||||
in
|
||||
match get_win () with
|
||||
@ -133,20 +139,21 @@ module Make (IO : IO)
|
||||
module Incr = Parser.Incremental
|
||||
|
||||
module Log = LexerLog.Make (Lexer)
|
||||
let log = Log.output_token ~offsets:IO.options#offsets
|
||||
IO.options#mode IO.options#cmd stdout
|
||||
let log = Log.output_token
|
||||
~offsets:IO.options#offsets
|
||||
IO.options#mode IO.options#cmd stdout
|
||||
|
||||
let incr_contract Lexer.{read; buffer; get_win; close; _} =
|
||||
let supplier = I.lexer_lexbuf_to_supplier (read ~log) buffer
|
||||
and failure = failure get_win in
|
||||
let parser = Incr.contract buffer.Lexing.lex_curr_p in
|
||||
let ast = I.loop_handle success failure supplier parser
|
||||
in close (); ast
|
||||
in flush_all (); close (); ast
|
||||
|
||||
let incr_expr Lexer.{read; buffer; get_win; close; _} =
|
||||
let supplier = I.lexer_lexbuf_to_supplier (read ~log) buffer
|
||||
and failure = failure get_win in
|
||||
let parser = Incr.interactive_expr buffer.Lexing.lex_curr_p in
|
||||
let expr = I.loop_handle success failure supplier parser
|
||||
in close (); expr
|
||||
in flush_all (); close (); expr
|
||||
end
|
||||
|
@ -2,10 +2,15 @@
|
||||
|
||||
module Region = Simple_utils.Region
|
||||
|
||||
type options = <
|
||||
offsets : bool;
|
||||
mode : [`Byte | `Point];
|
||||
cmd : EvalOpt.command
|
||||
>
|
||||
|
||||
module type IO =
|
||||
sig
|
||||
val ext : string (* LIGO file extension *)
|
||||
val options : EvalOpt.options (* CLI options *)
|
||||
val options : options
|
||||
end
|
||||
|
||||
(* The signature generated by Menhir with additional type definitions
|
||||
|
@ -1,11 +1,26 @@
|
||||
(* Functor to build a standalone LIGO parser *)
|
||||
(* Functor to build a LIGO parser *)
|
||||
|
||||
module Region = Simple_utils.Region
|
||||
module Region = Simple_utils.Region
|
||||
module Preproc = Preprocessor.Preproc
|
||||
module SSet = Set.Make (String)
|
||||
|
||||
module type IO =
|
||||
type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
|
||||
|
||||
module type SubIO =
|
||||
sig
|
||||
val ext : string (* LIGO file extension *)
|
||||
val options : EvalOpt.options (* CLI options *)
|
||||
type options = <
|
||||
libs : string list;
|
||||
verbose : SSet.t;
|
||||
offsets : bool;
|
||||
lang : language;
|
||||
ext : string; (* ".ligo", ".mligo", ".religo" *)
|
||||
mode : [`Byte | `Point];
|
||||
cmd : EvalOpt.command;
|
||||
mono : bool
|
||||
>
|
||||
|
||||
val options : options
|
||||
val make : input:string option -> expr:bool -> EvalOpt.options
|
||||
end
|
||||
|
||||
module type Pretty =
|
||||
@ -32,18 +47,18 @@ module Make (Lexer: Lexer.S)
|
||||
(ParErr: sig val message : int -> string end)
|
||||
(ParserLog: Pretty with type ast = AST.t
|
||||
and type expr = AST.expr)
|
||||
(IO: IO) =
|
||||
(SubIO: SubIO) =
|
||||
struct
|
||||
open Printf
|
||||
module SSet = Utils.String.Set
|
||||
module SSet = Set.Make (String)
|
||||
|
||||
(* Log of the lexer *)
|
||||
|
||||
module Log = LexerLog.Make (Lexer)

let log =
Log.output_token ~offsets:IO.options#offsets
IO.options#mode IO.options#cmd stdout
Log.output_token ~offsets:SubIO.options#offsets
SubIO.options#mode SubIO.options#cmd stdout

(* Error handling (reexported from [ParserAPI]) *)

@ -54,7 +69,12 @@ module Make (Lexer: Lexer.S)

(* Instantiating the parser *)

module Front = ParserAPI.Make (IO)(Lexer)(Parser)(ParErr)
module API_IO =
struct
let options = (SubIO.options :> ParserAPI.options)
end

module Front = ParserAPI.Make (API_IO)(Lexer)(Parser)(ParErr)

let format_error = Front.format_error

@ -67,13 +87,13 @@ module Make (Lexer: Lexer.S)
(AST.expr, message Region.reg) Stdlib.result =
let output = Buffer.create 131 in
let state =
ParserLog.mk_state ~offsets:IO.options#offsets
~mode:IO.options#mode
ParserLog.mk_state ~offsets:SubIO.options#offsets
~mode:SubIO.options#mode
~buffer:output in
let close () = lexer_inst.Lexer.close () in
let expr =
try
if IO.options#mono then
if SubIO.options#mono then
let tokeniser = lexer_inst.Lexer.read ~log
and lexbuf = lexer_inst.Lexer.buffer
in Front.mono_expr tokeniser lexbuf
@ -81,20 +101,20 @@ module Make (Lexer: Lexer.S)
Front.incr_expr lexer_inst
with exn -> close (); raise exn in
let () =
if SSet.mem "ast-tokens" IO.options#verbose then
if SSet.mem "ast-tokens" SubIO.options#verbose then
begin
Buffer.clear output;
ParserLog.print_expr state expr;
Buffer.output_buffer stdout output
end in
let () =
if SSet.mem "ast" IO.options#verbose then
if SSet.mem "ast" SubIO.options#verbose then
begin
Buffer.clear output;
ParserLog.pp_expr state expr;
Buffer.output_buffer stdout output
end
in close (); Ok expr
in flush_all (); close (); Ok expr

(* Parsing a contract *)

@ -102,13 +122,13 @@ module Make (Lexer: Lexer.S)
(AST.t, message Region.reg) Stdlib.result =
let output = Buffer.create 131 in
let state =
ParserLog.mk_state ~offsets:IO.options#offsets
~mode:IO.options#mode
ParserLog.mk_state ~offsets:SubIO.options#offsets
~mode:SubIO.options#mode
~buffer:output in
let close () = lexer_inst.Lexer.close () in
let ast =
try
if IO.options#mono then
if SubIO.options#mono then
let tokeniser = lexer_inst.Lexer.read ~log
and lexbuf = lexer_inst.Lexer.buffer
in Front.mono_contract tokeniser lexbuf
@ -116,25 +136,23 @@ module Make (Lexer: Lexer.S)
Front.incr_contract lexer_inst
with exn -> close (); raise exn in
let () =
if SSet.mem "ast-tokens" IO.options#verbose then
if SSet.mem "ast-tokens" SubIO.options#verbose then
begin
Buffer.clear output;
ParserLog.print_tokens state ast;
Buffer.output_buffer stdout output
end in
let () =
if SSet.mem "ast" IO.options#verbose then
if SSet.mem "ast" SubIO.options#verbose then
begin
Buffer.clear output;
ParserLog.pp_ast state ast;
Buffer.output_buffer stdout output
end
in close (); Ok ast
in flush_all (); close (); Ok ast

(* Wrapper for the parsers above *)

type 'a parser = Lexer.instance -> ('a, message Region.reg) result

let apply lexer_inst parser =
(* Calling the parser and filtering errors *)

@ -146,20 +164,18 @@ module Make (Lexer: Lexer.S)

| exception Lexer.Error err ->
let file =
match IO.options#input with
None | Some "-" -> false
| Some _ -> true in
lexer_inst.Lexer.buffer.Lexing.lex_curr_p.Lexing.pos_fname in
let error =
Lexer.format_error ~offsets:IO.options#offsets
IO.options#mode err ~file
Lexer.format_error ~offsets:SubIO.options#offsets
SubIO.options#mode err ~file:(file <> "")
in Stdlib.Error error

(* Incremental API of Menhir *)

| exception Front.Point point ->
let error =
Front.format_error ~offsets:IO.options#offsets
IO.options#mode point
Front.format_error ~offsets:SubIO.options#offsets
SubIO.options#mode point
in Stdlib.Error error

(* Monolithic API of Menhir *)
@ -169,16 +185,106 @@ module Make (Lexer: Lexer.S)
match lexer_inst.Lexer.get_win () with
Lexer.Nil ->
assert false (* Safe: There is always at least EOF. *)
| Lexer.One invalid -> invalid, None
| Lexer.Two (invalid, valid) -> invalid, Some valid in
| Lexer.One invalid -> invalid, None
| Lexer.Two (invalid, valid) -> invalid, Some valid in
let point = "", valid_opt, invalid in
let error =
Front.format_error ~offsets:IO.options#offsets
IO.options#mode point
Front.format_error ~offsets:SubIO.options#offsets
SubIO.options#mode point
in Stdlib.Error error

(* I/O errors *)

| exception Sys_error error ->
Stdlib.Error (Region.wrap_ghost error)
flush_all (); Stdlib.Error (Region.wrap_ghost error)

(* Preprocessing the input source *)

let preproc options lexbuf =
Preproc.lex (options :> Preprocessor.EvalOpt.options) lexbuf

(* Parsing a contract *)

let gen_parser options input parser =
match Lexer.lexbuf_from_input input with
Stdlib.Error (Lexer.File_opening msg) ->
Stdlib.Error (Region.wrap_ghost msg)
| Ok (lexbuf, close) ->
(* Preprocessing the input source *)
let file = Lexing.(lexbuf.lex_curr_p.pos_fname) in
match preproc options lexbuf with
Stdlib.Error (pp_buffer, err) ->
if SSet.mem "preproc" options#verbose then
Printf.printf "%s\n%!" (Buffer.contents pp_buffer);
let formatted =
Preproc.format ~offsets:options#offsets
~file:(file <> "")
err
in close (); Stdlib.Error formatted
| Stdlib.Ok buffer ->
(* Lexing and parsing the preprocessed input source *)

let () = close () in
let input' = Lexer.String (Buffer.contents buffer) in
match Lexer.open_token_stream options#lang input' with
Ok instance ->
let open Lexing in
instance.Lexer.buffer.lex_curr_p <-
{instance.Lexer.buffer.lex_curr_p with pos_fname = file};
apply instance parser
| Stdlib.Error (Lexer.File_opening msg) ->
Stdlib.Error (Region.wrap_ghost msg)

(* Parsing a contract in a file *)

let contract_in_file (source : string) =
let options = SubIO.make ~input:(Some source) ~expr:false
in gen_parser options (Lexer.File source) parse_contract

(* Parsing a contract in a string *)

let contract_in_string (source : string) =
let options = SubIO.make ~input:None ~expr:false in
gen_parser options (Lexer.String source) parse_contract

(* Parsing a contract in stdin *)

let contract_in_stdin () =
let options = SubIO.make ~input:None ~expr:false in
gen_parser options (Lexer.Channel stdin) parse_contract

(* Parsing an expression in a string *)

let expr_in_string (source : string) =
let options = SubIO.make ~input:None ~expr:true in
gen_parser options (Lexer.String source) parse_expr

(* Parsing an expression in stdin *)

let expr_in_stdin () =
let options = SubIO.make ~input:None ~expr:true in
gen_parser options (Lexer.Channel stdin) parse_expr

(* Preprocess only *)

let preprocess (source : string) =
let options = SubIO.make ~input:(Some source) ~expr:false in
try
let cin = open_in source in
let lexbuf = Lexing.from_channel cin in
let () =
let open Lexing in
lexbuf.lex_curr_p <- {lexbuf.lex_curr_p with pos_fname=source}
and options = (options :> Preprocessor.EvalOpt.options) in
match Preprocessor.Preproc.lex options lexbuf with
Stdlib.Ok _ as ok -> ok
| Error (_, err) ->
let formatted =
Preproc.format ~offsets:options#offsets
~file:true
err
in close_in cin; Stdlib.Error formatted
with Sys_error error ->
flush_all (); Stdlib.Error (Region.wrap_ghost error)

end

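For orientation, a minimal sketch of how the result-returning entry points defined above might be consumed; `Unit` stands for some instantiation of the `Make` functor and is not a name taken from this diff.

(* Hypothetical caller; [Unit] is assumed to be [Make (...)] applied to a
   concrete lexer, parser and logger.  The error payload is ignored here. *)
let _demo () =
  match Unit.contract_in_string "..." with
    Stdlib.Ok _ast -> print_endline "parse: ok"
  | Stdlib.Error _err -> prerr_endline "parse: error"
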
@ -2,10 +2,25 @@

module Region = Simple_utils.Region

module type IO =
type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]

module SSet : Set.S with type elt = string and type t = Set.Make(String).t

module type SubIO =
sig
val ext : string (* LIGO file extension *)
val options : EvalOpt.options (* CLI options *)
type options = <
libs : string list;
verbose : SSet.t;
offsets : bool;
lang : language;
ext : string; (* ".ligo", ".mligo", ".religo" *)
mode : [`Byte | `Point];
cmd : EvalOpt.command;
mono : bool
>

val options : options
val make : input:string option -> expr:bool -> EvalOpt.options
end

module type Pretty =
@ -32,7 +47,7 @@ module Make (Lexer : Lexer.S)
(ParErr : sig val message : int -> string end)
(ParserLog : Pretty with type ast = AST.t
and type expr = AST.expr)
(IO: IO) :
(SubIO: SubIO) :
sig
(* Error handling reexported from [ParserAPI] without the
exception [Point] *)
@ -50,10 +65,21 @@ module Make (Lexer : Lexer.S)

(* Parsers *)

type 'a parser = Lexer.instance -> ('a, message Region.reg) result
val contract_in_file :
string -> (AST.t, message Region.reg) Stdlib.result

val apply : Lexer.instance -> 'a parser -> ('a, message Region.reg) result
val contract_in_string :
string -> (AST.t, message Region.reg) Stdlib.result

val parse_contract : AST.t parser
val parse_expr : AST.expr parser
end
val contract_in_stdin :
unit -> (AST.t, message Region.reg) Stdlib.result

val expr_in_string :
string -> (AST.expr, message Region.reg) Stdlib.result

val expr_in_stdin :
unit -> (AST.expr, message Region.reg) Stdlib.result

val preprocess :
string -> (Buffer.t, message Region.reg) Stdlib.result
end

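A hedged sketch of what a concrete `SubIO` argument for `Make` could look like, derived only from the signature above; the CameLIGO-flavoured values and the `EvalOpt.make`/`EvalOpt.Quiet` names are assumptions, not part of this diff.

module SubIO_example : SubIO =
  struct
    type options = <
      libs    : string list;
      verbose : SSet.t;
      offsets : bool;
      lang    : language;
      ext     : string;
      mode    : [`Byte | `Point];
      cmd     : EvalOpt.command;
      mono    : bool
    >

    let options : options =
      object
        method libs    = []
        method verbose = SSet.empty
        method offsets = true
        method lang    = `CameLIGO
        method ext     = ".mligo"      (* placeholder extension *)
        method mode    = `Point
        method cmd     = EvalOpt.Quiet (* assumed constructor *)
        method mono    = false
      end

    (* [make] must build the EvalOpt.options fed to the lexer and
       preprocessor; EvalOpt.make and its labels are assumed here. *)
    let make ~input ~expr =
      EvalOpt.make ~libs:[] ~verbose:SSet.empty ~offsets:true
                   ~lang:`CameLIGO ~ext:".mligo" ~mode:`Point
                   ~cmd:EvalOpt.Quiet ~mono:false ~input ~expr
  end
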
@ -8,7 +8,8 @@
simple-utils
uutf
getopt
zarith)
zarith
Preprocessor)
(preprocess
(pps bisect_ppx --conditional))
(modules
@ -17,8 +18,8 @@
ParserAPI
Lexer
LexerLog
Utils
Markup
Utils
FQueue
EvalOpt
Version))

@ -1,7 +1,7 @@
open Trace
open Ligo_interpreter.Types
open Ligo_interpreter.Combinators
include Stage_common.Types
include Ast_typed.Types

module Env = Ligo_interpreter.Environment

@ -210,7 +210,7 @@ let rec apply_operator : Ast_typed.constant' -> value list -> value result =
| ( C_SET_MEM , [ v ; V_Set (elts) ] ) -> ok @@ v_bool (List.mem v elts)
| ( C_SET_REMOVE , [ v ; V_Set (elts) ] ) -> ok @@ V_Set (List.filter (fun el -> not (el = v)) elts)
| _ ->
let () = Format.printf "%a\n" Stage_common.PP.constant c in
let () = Format.printf "%a\n" Ast_typed.PP.constant c in
let () = List.iter ( fun e -> Format.printf "%s\n" (Ligo_interpreter.PP.pp_value e)) operands in
simple_fail "Unsupported constant op"
)
@ -338,25 +338,24 @@ and eval : Ast_typed.expression -> env -> value result
| Match_list cases , V_List [] ->
eval cases.match_nil env
| Match_list cases , V_List (head::tail) ->
let (head_var,tail_var,body,_) = cases.match_cons in
let env' = Env.extend (Env.extend env (head_var,head)) (tail_var, V_List tail) in
let {hd;tl;body;tv=_} = cases.match_cons in
let env' = Env.extend (Env.extend env (hd,head)) (tl, V_List tail) in
eval body env'
| Match_variant (case_list , _) , V_Construct (matched_c , proj) ->
let ((_, var) , body) =
| Match_variant {cases ; tv=_} , V_Construct (matched_c , proj) ->
let {constructor=_ ; pattern ; body} =
List.find
(fun case ->
let (Constructor c , _) = fst case in
(fun {constructor = (Constructor c) ; pattern=_ ; body=_} ->
String.equal matched_c c)
case_list in
let env' = Env.extend env (var, proj) in
cases in
let env' = Env.extend env (pattern, proj) in
eval body env'
| Match_bool cases , V_Ct (C_bool true) ->
eval cases.match_true env
| Match_bool cases , V_Ct (C_bool false) ->
eval cases.match_false env
| Match_option cases, V_Construct ("Some" , proj) ->
let (var,body,_) = cases.match_some in
let env' = Env.extend env (var,proj) in
let {opt;body;tv=_} = cases.match_some in
let env' = Env.extend env (opt,proj) in
eval body env'
| Match_option cases, V_Construct ("None" , V_Ct C_unit) ->
eval cases.match_none env
@ -370,16 +369,16 @@ let dummy : Ast_typed.program -> string result =
fun prg ->
let%bind (res,_) = bind_fold_list
(fun (pp,top_env) el ->
let (Ast_typed.Declaration_constant (exp_name, exp , _ , _)) = Location.unwrap el in
let (Ast_typed.Declaration_constant {binder; expr ; inline=_ ; _}) = Location.unwrap el in
let%bind v =
(*TODO This TRY-CATCH is here until we properly implement effects*)
try
eval exp top_env
eval expr top_env
with Temporary_hack s -> ok @@ V_Failure s
(*TODO This TRY-CATCH is here until we properly implement effects*)
in
let pp' = pp^"\n val "^(Var.to_name exp_name)^" = "^(Ligo_interpreter.PP.pp_value v) in
let top_env' = Env.extend top_env (exp_name, v) in
let pp' = pp^"\n val "^(Var.to_name binder)^" = "^(Ligo_interpreter.PP.pp_value v) in
let top_env' = Env.extend top_env (binder, v) in
ok @@ (pp',top_env')
)
("",Env.empty_env) prg in

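As an aside, a self-contained sketch of the record-based case selection that `eval` now performs for `Match_variant`; the types below are simplified stand-ins for the real AST, not definitions from this diff.

(* Simplified model: a match arm is a record, and the arm is selected by
   comparing constructor names, exactly as the List.find above does. *)
type constructor = Constructor of string
type 'expr case = { constructor : constructor ; pattern : string ; body : 'expr }

let select_case (Constructor matched) (cases : 'expr case list) : 'expr =
  let arm =
    List.find
      (fun {constructor = Constructor c; _} -> String.equal matched c)
      cases
  in arm.body
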
@ -3,7 +3,9 @@ module Append_tree = Tree.Append

open Trace
open Mini_c
open Stage_common.Types (*Todo : to remove *)
(* open Stage_common.Types (\*Todo : to remove *\) *)
module LMap = AST.Types.LMap
module CMap = AST.Types.CMap

let list_of_lmap m = List.rev @@ LMap.fold (fun _ v prev -> v :: prev) m []
let kv_list_of_lmap m = List.rev @@ LMap.fold (fun k v prev -> (k, v) :: prev) m []
@ -25,7 +27,7 @@ let extract_constructor (v : value) (tree : _ Append_tree.t') : (string * value
let open Append_tree in
let rec aux tv : (string * value * AST.type_expression) result=
match tv with
| Leaf (Constructor k, t), v -> ok (k, v, t)
| Leaf (Ast_typed.Constructor k, t), v -> ok (k, v, t)
| Node {a}, D_left v -> aux (a, v)
| Node {b}, D_right v -> aux (b, v)
| _ -> fail @@ internal_assertion_failure "bad constructor path"

@ -114,6 +114,121 @@ them. please report this to the developers." in
|
||||
end
|
||||
open Errors
|
||||
|
||||
let transpile_constant' : AST.constant' -> constant' = function
|
||||
| C_INT -> C_INT
|
||||
| C_UNIT -> C_UNIT
|
||||
| C_NIL -> C_NIL
|
||||
| C_NOW -> C_NOW
|
||||
| C_IS_NAT -> C_IS_NAT
|
||||
| C_SOME -> C_SOME
|
||||
| C_NONE -> C_NONE
|
||||
| C_ASSERTION -> C_ASSERTION
|
||||
| C_ASSERT_INFERRED -> C_ASSERT_INFERRED
|
||||
| C_FAILWITH -> C_FAILWITH
|
||||
| C_UPDATE -> C_UPDATE
|
||||
(* Loops *)
|
||||
| C_ITER -> C_ITER
|
||||
| C_FOLD_WHILE -> C_FOLD_WHILE
|
||||
| C_FOLD_CONTINUE -> C_FOLD_CONTINUE
|
||||
| C_FOLD_STOP -> C_FOLD_STOP
|
||||
| C_LOOP_LEFT -> C_LOOP_LEFT
|
||||
| C_LOOP_CONTINUE -> C_LOOP_CONTINUE
|
||||
| C_LOOP_STOP -> C_LOOP_STOP
|
||||
| C_FOLD -> C_FOLD
|
||||
(* MATH *)
|
||||
| C_NEG -> C_NEG
|
||||
| C_ABS -> C_ABS
|
||||
| C_ADD -> C_ADD
|
||||
| C_SUB -> C_SUB
|
||||
| C_MUL -> C_MUL
|
||||
| C_EDIV -> C_EDIV
|
||||
| C_DIV -> C_DIV
|
||||
| C_MOD -> C_MOD
|
||||
(* LOGIC *)
|
||||
| C_NOT -> C_NOT
|
||||
| C_AND -> C_AND
|
||||
| C_OR -> C_OR
|
||||
| C_XOR -> C_XOR
|
||||
| C_LSL -> C_LSL
|
||||
| C_LSR -> C_LSR
|
||||
(* COMPARATOR *)
|
||||
| C_EQ -> C_EQ
|
||||
| C_NEQ -> C_NEQ
|
||||
| C_LT -> C_LT
|
||||
| C_GT -> C_GT
|
||||
| C_LE -> C_LE
|
||||
| C_GE -> C_GE
|
||||
(* Bytes/ String *)
|
||||
| C_SIZE -> C_SIZE
|
||||
| C_CONCAT -> C_CONCAT
|
||||
| C_SLICE -> C_SLICE
|
||||
| C_BYTES_PACK -> C_BYTES_PACK
|
||||
| C_BYTES_UNPACK -> C_BYTES_UNPACK
|
||||
| C_CONS -> C_CONS
|
||||
(* Pair *)
|
||||
| C_PAIR -> C_PAIR
|
||||
| C_CAR -> C_CAR
|
||||
| C_CDR -> C_CDR
|
||||
| C_LEFT -> C_LEFT
|
||||
| C_RIGHT -> C_RIGHT
|
||||
(* Set *)
|
||||
| C_SET_EMPTY -> C_SET_EMPTY
|
||||
| C_SET_LITERAL -> C_SET_LITERAL
|
||||
| C_SET_ADD -> C_SET_ADD
|
||||
| C_SET_REMOVE -> C_SET_REMOVE
|
||||
| C_SET_ITER -> C_SET_ITER
|
||||
| C_SET_FOLD -> C_SET_FOLD
|
||||
| C_SET_MEM -> C_SET_MEM
|
||||
(* List *)
|
||||
| C_LIST_EMPTY -> C_LIST_EMPTY
|
||||
| C_LIST_LITERAL -> C_LIST_LITERAL
|
||||
| C_LIST_ITER -> C_LIST_ITER
|
||||
| C_LIST_MAP -> C_LIST_MAP
|
||||
| C_LIST_FOLD -> C_LIST_FOLD
|
||||
(* Maps *)
|
||||
| C_MAP -> C_MAP
|
||||
| C_MAP_EMPTY -> C_MAP_EMPTY
|
||||
| C_MAP_LITERAL -> C_MAP_LITERAL
|
||||
| C_MAP_GET -> C_MAP_GET
|
||||
| C_MAP_GET_FORCE -> C_MAP_GET_FORCE
|
||||
| C_MAP_ADD -> C_MAP_ADD
|
||||
| C_MAP_REMOVE -> C_MAP_REMOVE
|
||||
| C_MAP_UPDATE -> C_MAP_UPDATE
|
||||
| C_MAP_ITER -> C_MAP_ITER
|
||||
| C_MAP_MAP -> C_MAP_MAP
|
||||
| C_MAP_FOLD -> C_MAP_FOLD
|
||||
| C_MAP_MEM -> C_MAP_MEM
|
||||
| C_MAP_FIND -> C_MAP_FIND
|
||||
| C_MAP_FIND_OPT -> C_MAP_FIND_OPT
|
||||
(* Big Maps *)
|
||||
| C_BIG_MAP -> C_BIG_MAP
|
||||
| C_BIG_MAP_EMPTY -> C_BIG_MAP_EMPTY
|
||||
| C_BIG_MAP_LITERAL -> C_BIG_MAP_LITERAL
|
||||
(* Crypto *)
|
||||
| C_SHA256 -> C_SHA256
|
||||
| C_SHA512 -> C_SHA512
|
||||
| C_BLAKE2b -> C_BLAKE2b
|
||||
| C_HASH -> C_HASH
|
||||
| C_HASH_KEY -> C_HASH_KEY
|
||||
| C_CHECK_SIGNATURE -> C_CHECK_SIGNATURE
|
||||
| C_CHAIN_ID -> C_CHAIN_ID
|
||||
(* Blockchain *)
|
||||
| C_CALL -> C_CALL
|
||||
| C_CONTRACT -> C_CONTRACT
|
||||
| C_CONTRACT_OPT -> C_CONTRACT_OPT
|
||||
| C_CONTRACT_ENTRYPOINT -> C_CONTRACT_ENTRYPOINT
|
||||
| C_CONTRACT_ENTRYPOINT_OPT -> C_CONTRACT_ENTRYPOINT_OPT
|
||||
| C_AMOUNT -> C_AMOUNT
|
||||
| C_BALANCE -> C_BALANCE
|
||||
| C_SOURCE -> C_SOURCE
|
||||
| C_SENDER -> C_SENDER
|
||||
| C_ADDRESS -> C_ADDRESS
|
||||
| C_SELF -> C_SELF
|
||||
| C_SELF_ADDRESS -> C_SELF_ADDRESS
|
||||
| C_IMPLICIT_ACCOUNT -> C_IMPLICIT_ACCOUNT
|
||||
| C_SET_DELEGATE -> C_SET_DELEGATE
|
||||
| C_CREATE_CONTRACT -> C_CREATE_CONTRACT
|
||||
|
||||
let rec transpile_type (t:AST.type_expression) : type_value result =
|
||||
match t.type_content with
|
||||
| T_variable (name) -> fail @@ no_type_variable @@ name
|
||||
@ -135,15 +250,15 @@ let rec transpile_type (t:AST.type_expression) : type_value result =
|
||||
| T_operator (TC_contract x) ->
|
||||
let%bind x' = transpile_type x in
|
||||
ok (T_contract x')
|
||||
| T_operator (TC_map (key,value)) ->
|
||||
let%bind kv' = bind_map_pair transpile_type (key, value) in
|
||||
| T_operator (TC_map {k;v}) ->
|
||||
let%bind kv' = bind_map_pair transpile_type (k, v) in
|
||||
ok (T_map kv')
|
||||
| T_operator (TC_big_map (key,value)) ->
|
||||
let%bind kv' = bind_map_pair transpile_type (key, value) in
|
||||
| T_operator (TC_big_map {k;v}) ->
|
||||
let%bind kv' = bind_map_pair transpile_type (k, v) in
|
||||
ok (T_big_map kv')
|
||||
| T_operator (TC_map_or_big_map (_,_)) ->
|
||||
| T_operator (TC_map_or_big_map _) ->
|
||||
fail @@ corner_case ~loc:"transpiler" "TC_map_or_big_map should have been resolved before transpilation"
|
||||
| T_operator (TC_michelson_or (l,r)) ->
|
||||
| T_operator (TC_michelson_or {l;r}) ->
|
||||
let%bind l' = transpile_type l in
|
||||
let%bind r' = transpile_type r in
|
||||
ok (T_or ((None,l'),(None,r')))
|
||||
@ -156,7 +271,7 @@ let rec transpile_type (t:AST.type_expression) : type_value result =
|
||||
| T_operator (TC_option o) ->
|
||||
let%bind o' = transpile_type o in
|
||||
ok (T_option o')
|
||||
| T_operator (TC_arrow (param , result)) -> (
|
||||
| T_operator (TC_arrow {type1=param ; type2=result}) -> (
|
||||
let%bind param' = transpile_type param in
|
||||
let%bind result' = transpile_type result in
|
||||
ok (T_function (param', result'))
|
||||
@ -171,7 +286,7 @@ let rec transpile_type (t:AST.type_expression) : type_value result =
|
||||
ok (None, T_or (a, b))
|
||||
in
|
||||
let%bind m' = Append_tree.fold_ne
|
||||
(fun (Stage_common.Types.Constructor ann, a) ->
|
||||
(fun (Ast_typed.Types.Constructor ann, a) ->
|
||||
let%bind a = transpile_type a in
|
||||
ok ((
|
||||
if is_michelson_or then
|
||||
@ -213,7 +328,7 @@ let rec transpile_type (t:AST.type_expression) : type_value result =
|
||||
)
|
||||
|
||||
let record_access_to_lr : type_value -> type_value AST.label_map -> AST.label -> (type_value * [`Left | `Right]) list result = fun ty tym ind ->
|
||||
let tys = Stage_common.Helpers.kv_list_of_record_or_tuple tym in
|
||||
let tys = Ast_typed.Helpers.kv_list_of_record_or_tuple tym in
|
||||
let node_tv = Append_tree.of_list tys in
|
||||
let%bind path =
|
||||
let aux (i , _) = i = ind in
|
||||
@ -313,7 +428,7 @@ and transpile_annotated_expression (ae:AST.expression) : expression result =
|
||||
)
|
||||
| E_record m -> (
|
||||
(*list_of_lmap to record_to_list*)
|
||||
let node = Append_tree.of_list @@ Stage_common.Helpers.list_of_record_or_tuple m in
|
||||
let node = Append_tree.of_list @@ Ast_typed.Helpers.list_of_record_or_tuple m in
|
||||
let aux a b : expression result =
|
||||
let%bind a = a in
|
||||
let%bind b = b in
|
||||
@ -330,7 +445,7 @@ and transpile_annotated_expression (ae:AST.expression) : expression result =
|
||||
let%bind ty_lmap =
|
||||
trace_strong (corner_case ~loc:__LOC__ "not a record") @@
|
||||
get_t_record (get_type_expression record) in
|
||||
let%bind ty'_lmap = Stage_common.Helpers.bind_map_lmap transpile_type ty_lmap in
|
||||
let%bind ty'_lmap = Ast_typed.Helpers.bind_map_lmap transpile_type ty_lmap in
|
||||
let%bind path =
|
||||
trace_strong (corner_case ~loc:__LOC__ "record access") @@
|
||||
record_access_to_lr ty' ty'_lmap path in
|
||||
@ -347,7 +462,7 @@ and transpile_annotated_expression (ae:AST.expression) : expression result =
|
||||
let%bind ty_lmap =
|
||||
trace_strong (corner_case ~loc:__LOC__ "not a record") @@
|
||||
get_t_record (get_type_expression record) in
|
||||
let%bind ty'_lmap = Stage_common.Helpers.bind_map_lmap transpile_type ty_lmap in
|
||||
let%bind ty'_lmap = Ast_typed.Helpers.bind_map_lmap transpile_type ty_lmap in
|
||||
let%bind path =
|
||||
trace_strong (corner_case ~loc:__LOC__ "record access") @@
|
||||
record_access_to_lr ty' ty'_lmap path in
|
||||
@ -406,7 +521,7 @@ and transpile_annotated_expression (ae:AST.expression) : expression result =
|
||||
| (C_MAP_FOLD , lst) -> fold lst
|
||||
| _ -> (
|
||||
let%bind lst' = bind_map_list (transpile_annotated_expression) lst in
|
||||
return @@ E_constant {cons_name=name;arguments=lst'}
|
||||
return @@ E_constant {cons_name=transpile_constant' name;arguments=lst'}
|
||||
)
|
||||
)
|
||||
| E_lambda l ->
|
||||
@ -420,30 +535,30 @@ and transpile_annotated_expression (ae:AST.expression) : expression result =
|
||||
| Match_bool {match_true ; match_false} ->
|
||||
let%bind (t , f) = bind_map_pair (transpile_annotated_expression) (match_true, match_false) in
|
||||
return @@ E_if_bool (expr', t, f)
|
||||
| Match_option { match_none; match_some = (name, s, tv) } ->
|
||||
| Match_option { match_none; match_some = {opt; body; tv} } ->
|
||||
let%bind n = transpile_annotated_expression match_none in
|
||||
let%bind (tv' , s') =
|
||||
let%bind tv' = transpile_type tv in
|
||||
let%bind s' = transpile_annotated_expression s in
|
||||
let%bind s' = transpile_annotated_expression body in
|
||||
ok (tv' , s')
|
||||
in
|
||||
return @@ E_if_none (expr' , n , ((name , tv') , s'))
|
||||
return @@ E_if_none (expr' , n , ((opt , tv') , s'))
|
||||
| Match_list {
|
||||
match_nil ;
|
||||
match_cons = ((hd_name) , (tl_name), match_cons, ty) ;
|
||||
match_cons = {hd; tl; body; tv} ;
|
||||
} -> (
|
||||
let%bind nil = transpile_annotated_expression match_nil in
|
||||
let%bind cons =
|
||||
let%bind ty' = transpile_type ty in
|
||||
let%bind match_cons' = transpile_annotated_expression match_cons in
|
||||
ok (((hd_name , ty') , (tl_name , ty')) , match_cons')
|
||||
let%bind ty' = transpile_type tv in
|
||||
let%bind match_cons' = transpile_annotated_expression body in
|
||||
ok (((hd , ty') , (tl , ty')) , match_cons')
|
||||
in
|
||||
return @@ E_if_cons (expr' , nil , cons)
|
||||
)
|
||||
| Match_variant (lst , variant) -> (
|
||||
| Match_variant {cases ; tv} -> (
|
||||
let%bind tree =
|
||||
trace_strong (corner_case ~loc:__LOC__ "getting lr tree") @@
|
||||
tree_of_sum variant in
|
||||
tree_of_sum tv in
|
||||
let%bind tree' = match tree with
|
||||
| Empty -> fail (corner_case ~loc:__LOC__ "match empty variant")
|
||||
| Full x -> ok x in
|
||||
@ -463,12 +578,14 @@ and transpile_annotated_expression (ae:AST.expression) : expression result =
|
||||
|
||||
let rec aux top t =
|
||||
match t with
|
||||
| ((`Leaf constructor_name) , tv) -> (
|
||||
let%bind ((_ , name) , body) =
|
||||
| ((`Leaf (AST.Constructor constructor_name)) , tv) -> (
|
||||
let%bind {constructor=_ ; pattern ; body} =
|
||||
trace_option (corner_case ~loc:__LOC__ "missing match clause") @@
|
||||
List.find_opt (fun ((constructor_name' , _) , _) -> constructor_name' = constructor_name) lst in
|
||||
let aux ({constructor = Constructor c ; pattern=_ ; body=_} : AST.matching_content_case) =
|
||||
(c = constructor_name) in
|
||||
List.find_opt aux cases in
|
||||
let%bind body' = transpile_annotated_expression body in
|
||||
return @@ E_let_in ((name , tv) , false , top , body')
|
||||
return @@ E_let_in ((pattern , tv) , false , top , body')
|
||||
)
|
||||
| ((`Node (a , b)) , tv) ->
|
||||
let%bind a' =
|
||||
@ -541,30 +658,30 @@ and transpile_recursive {fun_name; fun_type; lambda} =
|
||||
Match_bool {match_true; match_false} ->
|
||||
let%bind (t , f) = bind_map_pair (replace_callback fun_name loop_type shadowed) (match_true, match_false) in
|
||||
return @@ E_if_bool (expr, t, f)
|
||||
| Match_option { match_none; match_some = (name, s, tv) } ->
|
||||
| Match_option { match_none; match_some = {opt; body; tv} } ->
|
||||
let%bind n = replace_callback fun_name loop_type shadowed match_none in
|
||||
let%bind (tv' , s') =
|
||||
let%bind tv' = transpile_type tv in
|
||||
let%bind s' = replace_callback fun_name loop_type shadowed s in
|
||||
let%bind s' = replace_callback fun_name loop_type shadowed body in
|
||||
ok (tv' , s')
|
||||
in
|
||||
return @@ E_if_none (expr , n , ((name , tv') , s'))
|
||||
return @@ E_if_none (expr , n , ((opt , tv') , s'))
|
||||
| Match_list {
|
||||
match_nil ;
|
||||
match_cons = ((hd_name) , (tl_name), match_cons, ty) ;
|
||||
match_cons = { hd ; tl ; body ; tv } ;
|
||||
} -> (
|
||||
let%bind nil = replace_callback fun_name loop_type shadowed match_nil in
|
||||
let%bind cons =
|
||||
let%bind ty' = transpile_type ty in
|
||||
let%bind match_cons' = replace_callback fun_name loop_type shadowed match_cons in
|
||||
ok (((hd_name , ty') , (tl_name , ty')) , match_cons')
|
||||
let%bind ty' = transpile_type tv in
|
||||
let%bind match_cons' = replace_callback fun_name loop_type shadowed body in
|
||||
ok (((hd , ty') , (tl , ty')) , match_cons')
|
||||
in
|
||||
return @@ E_if_cons (expr , nil , cons)
|
||||
)
|
||||
| Match_variant (lst , variant) -> (
|
||||
| Match_variant {cases;tv} -> (
|
||||
let%bind tree =
|
||||
trace_strong (corner_case ~loc:__LOC__ "getting lr tree") @@
|
||||
tree_of_sum variant in
|
||||
tree_of_sum tv in
|
||||
let%bind tree' = match tree with
|
||||
| Empty -> fail (corner_case ~loc:__LOC__ "match empty variant")
|
||||
| Full x -> ok x in
|
||||
@ -583,12 +700,14 @@ and transpile_recursive {fun_name; fun_type; lambda} =
|
||||
in
|
||||
let rec aux top t =
|
||||
match t with
|
||||
| ((`Leaf constructor_name) , tv) -> (
|
||||
let%bind ((_ , name) , body) =
|
||||
| ((`Leaf (AST.Constructor constructor_name)) , tv) -> (
|
||||
let%bind {constructor=_ ; pattern ; body} =
|
||||
trace_option (corner_case ~loc:__LOC__ "missing match clause") @@
|
||||
List.find_opt (fun ((constructor_name' , _) , _) -> constructor_name' = constructor_name) lst in
|
||||
let aux ({constructor = Constructor c ; pattern=_ ; body=_} : AST.matching_content_case) =
|
||||
(c = constructor_name) in
|
||||
List.find_opt aux cases in
|
||||
let%bind body' = replace_callback fun_name loop_type shadowed body in
|
||||
return @@ E_let_in ((name , tv) , false , top , body')
|
||||
return @@ E_let_in ((pattern , tv) , false , top , body')
|
||||
)
|
||||
| ((`Node (a , b)) , tv) ->
|
||||
let%bind a' =
|
||||
@ -622,12 +741,11 @@ and transpile_recursive {fun_name; fun_type; lambda} =
|
||||
|
||||
let transpile_declaration env (d:AST.declaration) : toplevel_statement result =
|
||||
match d with
|
||||
| Declaration_constant (name,expression, inline, _) ->
|
||||
let name = name in
|
||||
let%bind expression = transpile_annotated_expression expression in
|
||||
| Declaration_constant { binder ; expr ; inline ; post_env=_ } ->
|
||||
let%bind expression = transpile_annotated_expression expr in
|
||||
let tv = Combinators.Expression.get_type expression in
|
||||
let env' = Environment.add (name, tv) env in
|
||||
ok @@ ((name, inline, expression), environment_wrap env env')
|
||||
let env' = Environment.add (binder, tv) env in
|
||||
ok @@ ((binder, inline, expression), environment_wrap env env')
|
||||
|
||||
let transpile_program (lst : AST.program) : program result =
|
||||
let aux (prev:(toplevel_statement list * Environment.t) result) cur =
|
||||
|
@ -150,43 +150,42 @@ let rec untranspile (v : value) (t : AST.type_expression) : AST.expression resul
|
||||
let%bind s' = untranspile s o in
|
||||
ok (e_a_empty_some s')
|
||||
)
|
||||
| TC_map (k_ty,v_ty)-> (
|
||||
| TC_map {k=k_ty;v=v_ty}-> (
|
||||
let%bind map =
|
||||
trace_strong (wrong_mini_c_value "map" v) @@
|
||||
get_map v in
|
||||
let%bind map' =
|
||||
let aux = fun (k, v) ->
|
||||
let%bind k' = untranspile k k_ty in
|
||||
let%bind v' = untranspile v v_ty in
|
||||
ok (k', v') in
|
||||
let%bind k = untranspile k k_ty in
|
||||
let%bind v = untranspile v v_ty in
|
||||
ok ({k; v} : AST.map_kv) in
|
||||
bind_map_list aux map in
|
||||
let map' = List.sort_uniq compare map' in
|
||||
let aux = fun prev (k, v) ->
|
||||
let (k', v') = (k , v ) in
|
||||
return @@ E_constant {cons_name=C_MAP_ADD;arguments=[k' ; v' ; prev]}
|
||||
let aux = fun prev ({ k ; v } : AST.map_kv) ->
|
||||
return @@ E_constant {cons_name=C_MAP_ADD;arguments=[k ; v ; prev]}
|
||||
in
|
||||
let%bind init = return @@ E_constant {cons_name=C_MAP_EMPTY;arguments=[]} in
|
||||
bind_fold_right_list aux init map'
|
||||
)
|
||||
| TC_big_map (k_ty, v_ty) -> (
|
||||
| TC_big_map {k=k_ty; v=v_ty} -> (
|
||||
let%bind big_map =
|
||||
trace_strong (wrong_mini_c_value "big_map" v) @@
|
||||
get_big_map v in
|
||||
let%bind big_map' =
|
||||
let aux = fun (k, v) ->
|
||||
let%bind k' = untranspile k k_ty in
|
||||
let%bind v' = untranspile v v_ty in
|
||||
ok (k', v') in
|
||||
let%bind k = untranspile k k_ty in
|
||||
let%bind v = untranspile v v_ty in
|
||||
ok ({k; v} : AST.map_kv) in
|
||||
bind_map_list aux big_map in
|
||||
let big_map' = List.sort_uniq compare big_map' in
|
||||
let aux = fun prev (k, v) ->
|
||||
let aux = fun prev ({ k ; v } : AST.map_kv) ->
|
||||
return @@ E_constant {cons_name=C_MAP_ADD;arguments=[k ; v ; prev]}
|
||||
in
|
||||
let%bind init = return @@ E_constant {cons_name=C_BIG_MAP_EMPTY;arguments=[]} in
|
||||
bind_fold_right_list aux init big_map'
|
||||
)
|
||||
| TC_map_or_big_map (_, _) -> fail @@ corner_case ~loc:"untranspiler" "TC_map_or_big_map t should not be present in mini-c"
|
||||
| TC_michelson_or (l_ty, r_ty) -> (
|
||||
| TC_map_or_big_map _ -> fail @@ corner_case ~loc:"untranspiler" "TC_map_or_big_map t should not be present in mini-c"
|
||||
| TC_michelson_or {l=l_ty; r=r_ty} -> (
|
||||
let%bind v' = bind_map_or (get_left , get_right) v in
|
||||
( match v' with
|
||||
| D_left l ->
|
||||
@ -244,7 +243,7 @@ let rec untranspile (v : value) (t : AST.type_expression) : AST.expression resul
|
||||
let%bind sub = untranspile v tv in
|
||||
return (E_constructor {constructor=Constructor name;element=sub})
|
||||
| T_record m ->
|
||||
let lst = Stage_common.Helpers.kv_list_of_record_or_tuple m in
|
||||
let lst = Ast_typed.Helpers.kv_list_of_record_or_tuple m in
|
||||
let%bind node = match Append_tree.of_list lst with
|
||||
| Empty -> fail @@ corner_case ~loc:__LOC__ "empty record"
|
||||
| Full t -> ok t in
|
||||
|
@ -120,7 +120,7 @@ module Errors = struct
let data = [
("expression" ,
(** TODO: The labelled arguments should be flowing from the CLI. *)
thunk @@ Parser.Cameligo.ParserLog.expr_to_string
thunk @@ Parser_cameligo.ParserLog.expr_to_string
~offsets:true ~mode:`Point t)]
in error ~data title message

@ -204,7 +204,7 @@ let rec typed_pattern_to_typed_vars : Raw.pattern -> _ = fun pattern ->
|
||||
| Raw.PPar pp -> typed_pattern_to_typed_vars pp.value.inside
|
||||
| Raw.PTyped pt ->
|
||||
let (p,t) = pt.value.pattern,pt.value.type_expr in
|
||||
let%bind p = tuple_pattern_to_vars p in
|
||||
let%bind p = tuple_pattern_to_vars p in
|
||||
let%bind t = compile_type_expression t in
|
||||
ok @@ (p,t)
|
||||
| other -> (fail @@ wrong_pattern "parenthetical or type annotation" other)
|
||||
@ -320,7 +320,7 @@ let rec compile_expression :
|
||||
| [] -> e_variable (Var.of_name name)
|
||||
| _ ->
|
||||
let aux expr (Label l) = e_record_accessor expr l in
|
||||
List.fold_left aux (e_variable (Var.of_name name)) path in
|
||||
List.fold_left aux (e_variable (Var.of_name name)) path in
|
||||
let updates = u.updates.value.ne_elements in
|
||||
let%bind updates' =
|
||||
let aux (f:Raw.field_path_assign Raw.reg) =
|
||||
@ -330,13 +330,13 @@ let rec compile_expression :
|
||||
in
|
||||
bind_map_list aux @@ npseq_to_list updates
|
||||
in
|
||||
let aux ur (path, expr) =
|
||||
let aux ur (path, expr) =
|
||||
let rec aux record = function
|
||||
| [] -> failwith "error in parsing"
|
||||
| hd :: [] -> ok @@ e_record_update ~loc record hd expr
|
||||
| hd :: tl ->
|
||||
| hd :: tl ->
|
||||
let%bind expr = (aux (e_record_accessor ~loc record hd) tl) in
|
||||
ok @@ e_record_update ~loc record hd expr
|
||||
ok @@ e_record_update ~loc record hd expr
|
||||
in
|
||||
aux ur path in
|
||||
bind_fold_list aux record updates'
|
||||
@ -392,9 +392,9 @@ let rec compile_expression :
|
||||
(chain_let_in tl body)
|
||||
| [] -> body (* Precluded by corner case assertion above *)
|
||||
in
|
||||
let%bind ty_opt = match ty_opt with
|
||||
| None -> (match let_rhs with
|
||||
| EFun {value={binders;lhs_type}} ->
|
||||
let%bind ty_opt = match ty_opt with
|
||||
| None -> (match let_rhs with
|
||||
| EFun {value={binders;lhs_type}} ->
|
||||
let f_args = nseq_to_list (binders) in
|
||||
let%bind lhs_type' = bind_map_option (fun x -> compile_type_expression (snd x)) lhs_type in
|
||||
let%bind ty = bind_map_list typed_pattern_to_typed_vars f_args in
|
||||
@ -409,12 +409,12 @@ let rec compile_expression :
|
||||
(* Bind the right hand side so we only evaluate it once *)
|
||||
else ok (e_let_in (rhs_b, ty_opt) inline rhs' (chain_let_in prep_vars body))
|
||||
in
|
||||
let%bind ret_expr = match kwd_rec with
|
||||
let%bind ret_expr = match kwd_rec with
|
||||
| None -> ok @@ ret_expr
|
||||
| Some _ ->
|
||||
match ret_expr.expression_content with
|
||||
| Some _ ->
|
||||
match ret_expr.expression_content with
|
||||
| E_let_in li -> (
|
||||
let%bind lambda =
|
||||
let%bind lambda =
|
||||
let rec aux rhs = match rhs.expression_content with
|
||||
| E_lambda l -> ok @@ l
|
||||
| E_ascription a -> aux a.anno_expr
|
||||
@ -423,9 +423,9 @@ let rec compile_expression :
|
||||
aux rhs'
|
||||
in
|
||||
let fun_name = fst @@ List.hd prep_vars in
|
||||
let%bind fun_type = match ty_opt with
|
||||
let%bind fun_type = match ty_opt with
|
||||
| Some t -> ok @@ t
|
||||
| None -> match rhs'.expression_content with
|
||||
| None -> match rhs'.expression_content with
|
||||
| E_ascription a -> ok a.type_annotation
|
||||
| _ -> fail @@ untyped_recursive_function e
|
||||
in
|
||||
@ -878,9 +878,9 @@ and compile_declaration : Raw.declaration -> declaration Location.wrap list resu
|
||||
ok (Raw.EFun {region=Region.ghost ; value=fun_},List.fold_right' aux lhs_type' ty)
|
||||
in
|
||||
let%bind rhs' = compile_expression let_rhs in
|
||||
let%bind lhs_type = match lhs_type with
|
||||
| None -> (match let_rhs with
|
||||
| EFun {value={binders;lhs_type}} ->
|
||||
let%bind lhs_type = match lhs_type with
|
||||
| None -> (match let_rhs with
|
||||
| EFun {value={binders;lhs_type}} ->
|
||||
let f_args = nseq_to_list (binders) in
|
||||
let%bind lhs_type' = bind_map_option (fun x -> compile_type_expression (snd x)) lhs_type in
|
||||
let%bind ty = bind_map_list typed_pattern_to_typed_vars f_args in
|
||||
@ -891,13 +891,13 @@ and compile_declaration : Raw.declaration -> declaration Location.wrap list resu
|
||||
| Some t -> ok @@ Some t
|
||||
in
|
||||
let binder = Var.of_name var.value in
|
||||
let%bind rhs' = match recursive with
|
||||
None -> ok @@ rhs'
|
||||
| Some _ -> match rhs'.expression_content with
|
||||
let%bind rhs' = match recursive with
|
||||
None -> ok @@ rhs'
|
||||
| Some _ -> match rhs'.expression_content with
|
||||
E_lambda lambda ->
|
||||
(match lhs_type with
|
||||
None -> fail @@ untyped_recursive_function var
|
||||
| Some (lhs_type) ->
|
||||
(match lhs_type with
|
||||
None -> fail @@ untyped_recursive_function var
|
||||
| Some (lhs_type) ->
|
||||
let expression_content = E_recursive {fun_name=binder;fun_type=lhs_type;lambda} in
|
||||
ok @@ {rhs' with expression_content})
|
||||
| _ -> ok @@ rhs'
|
||||
@ -996,7 +996,7 @@ and compile_cases : type a . (Raw.pattern * a) list -> (a, unit) matching_conten
|
||||
(** TODO: The labelled arguments should be flowing from the CLI. *)
|
||||
let content () =
|
||||
Printf.sprintf "Pattern : %s"
|
||||
(Parser.Cameligo.ParserLog.pattern_to_string
|
||||
(Parser_cameligo.ParserLog.pattern_to_string
|
||||
~offsets:true ~mode:`Point x) in
|
||||
error title content
|
||||
in
|
||||
|
@ -66,15 +66,15 @@ module Wrap = struct
P_constant (csttag, [])
| T_operator (type_operator) ->
let (csttag, args) = Core.(match type_operator with
| TC_option o -> (C_option, [o])
| TC_set s -> (C_set, [s])
| TC_map ( k , v ) -> (C_map, [k;v])
| TC_big_map ( k , v) -> (C_big_map, [k;v])
| TC_map_or_big_map ( k , v) -> (C_map, [k;v])
| TC_michelson_or ( k , v) -> (C_michelson_or, [k;v])
| TC_arrow ( arg , ret ) -> (C_arrow, [ arg ; ret ])
| TC_list l -> (C_list, [l])
| TC_contract c -> (C_contract, [c])
| TC_option o -> (C_option, [o])
| TC_set s -> (C_set, [s])
| TC_map { k ; v } -> (C_map, [k;v])
| TC_big_map { k ; v } -> (C_big_map, [k;v])
| TC_map_or_big_map { k ; v } -> (C_map, [k;v])
| TC_michelson_or { l; r } -> (C_michelson_or, [l;r])
| TC_arrow { type1 ; type2 } -> (C_arrow, [ type1 ; type2 ])
| TC_list l -> (C_list, [l])
| TC_contract c -> (C_contract, [c])
)
in
P_constant (csttag, List.map type_expression_to_type_value args)

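For clarity, a small self-contained illustration of the change visible in this hunk: binary type operators now carry inline records rather than tuples, so call sites bind the components by field name. The `ty` type and the constructor set below are stand-ins, not the real `Core` definitions.

type ty = TUnit
type type_operator =
  | TC_map of { k : ty ; v : ty }
  | TC_big_map of { k : ty ; v : ty }
  | TC_arrow of { type1 : ty ; type2 : ty }

(* Mirrors the (csttag, args) pairing above, with tuple payloads replaced
   by named fields. *)
let components = function
  | TC_map {k; v} -> [k; v]
  | TC_big_map {k; v} -> [k; v]
  | TC_arrow {type1; type2} -> [type1; type2]
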
@ -163,6 +163,274 @@ end
|
||||
|
||||
open Errors
|
||||
|
||||
let convert_constructor' (I.Constructor c) = O.Constructor c
|
||||
let unconvert_constructor' (O.Constructor c) = I.Constructor c
|
||||
let convert_label (I.Label c) = O.Label c
|
||||
let unconvert_label (O.Label c) = I.Label c
|
||||
let convert_type_constant : I.type_constant -> O.type_constant = function
|
||||
| TC_unit -> TC_unit
|
||||
| TC_string -> TC_string
|
||||
| TC_bytes -> TC_bytes
|
||||
| TC_nat -> TC_nat
|
||||
| TC_int -> TC_int
|
||||
| TC_mutez -> TC_mutez
|
||||
| TC_bool -> TC_bool
|
||||
| TC_operation -> TC_operation
|
||||
| TC_address -> TC_address
|
||||
| TC_key -> TC_key
|
||||
| TC_key_hash -> TC_key_hash
|
||||
| TC_chain_id -> TC_chain_id
|
||||
| TC_signature -> TC_signature
|
||||
| TC_timestamp -> TC_timestamp
|
||||
| TC_void -> TC_void
|
||||
|
||||
let unconvert_type_constant : O.type_constant -> I.type_constant = function
|
||||
| TC_unit -> TC_unit
|
||||
| TC_string -> TC_string
|
||||
| TC_bytes -> TC_bytes
|
||||
| TC_nat -> TC_nat
|
||||
| TC_int -> TC_int
|
||||
| TC_mutez -> TC_mutez
|
||||
| TC_bool -> TC_bool
|
||||
| TC_operation -> TC_operation
|
||||
| TC_address -> TC_address
|
||||
| TC_key -> TC_key
|
||||
| TC_key_hash -> TC_key_hash
|
||||
| TC_chain_id -> TC_chain_id
|
||||
| TC_signature -> TC_signature
|
||||
| TC_timestamp -> TC_timestamp
|
||||
| TC_void -> TC_void
|
||||
|
||||
let convert_constant' : I.constant' -> O.constant' = function
|
||||
| C_INT -> C_INT
|
||||
| C_UNIT -> C_UNIT
|
||||
| C_NIL -> C_NIL
|
||||
| C_NOW -> C_NOW
|
||||
| C_IS_NAT -> C_IS_NAT
|
||||
| C_SOME -> C_SOME
|
||||
| C_NONE -> C_NONE
|
||||
| C_ASSERTION -> C_ASSERTION
|
||||
| C_ASSERT_INFERRED -> C_ASSERT_INFERRED
|
||||
| C_FAILWITH -> C_FAILWITH
|
||||
| C_UPDATE -> C_UPDATE
|
||||
(* Loops *)
|
||||
| C_ITER -> C_ITER
|
||||
| C_FOLD_WHILE -> C_FOLD_WHILE
|
||||
| C_FOLD_CONTINUE -> C_FOLD_CONTINUE
|
||||
| C_FOLD_STOP -> C_FOLD_STOP
|
||||
| C_LOOP_LEFT -> C_LOOP_LEFT
|
||||
| C_LOOP_CONTINUE -> C_LOOP_CONTINUE
|
||||
| C_LOOP_STOP -> C_LOOP_STOP
|
||||
| C_FOLD -> C_FOLD
|
||||
(* MATH *)
|
||||
| C_NEG -> C_NEG
|
||||
| C_ABS -> C_ABS
|
||||
| C_ADD -> C_ADD
|
||||
| C_SUB -> C_SUB
|
||||
| C_MUL -> C_MUL
|
||||
| C_EDIV -> C_EDIV
|
||||
| C_DIV -> C_DIV
|
||||
| C_MOD -> C_MOD
|
||||
(* LOGIC *)
|
||||
| C_NOT -> C_NOT
|
||||
| C_AND -> C_AND
|
||||
| C_OR -> C_OR
|
||||
| C_XOR -> C_XOR
|
||||
| C_LSL -> C_LSL
|
||||
| C_LSR -> C_LSR
|
||||
(* COMPARATOR *)
|
||||
| C_EQ -> C_EQ
|
||||
| C_NEQ -> C_NEQ
|
||||
| C_LT -> C_LT
|
||||
| C_GT -> C_GT
|
||||
| C_LE -> C_LE
|
||||
| C_GE -> C_GE
|
||||
(* Bytes/ String *)
|
||||
| C_SIZE -> C_SIZE
|
||||
| C_CONCAT -> C_CONCAT
|
||||
| C_SLICE -> C_SLICE
|
||||
| C_BYTES_PACK -> C_BYTES_PACK
|
||||
| C_BYTES_UNPACK -> C_BYTES_UNPACK
|
||||
| C_CONS -> C_CONS
|
||||
(* Pair *)
|
||||
| C_PAIR -> C_PAIR
|
||||
| C_CAR -> C_CAR
|
||||
| C_CDR -> C_CDR
|
||||
| C_LEFT -> C_LEFT
|
||||
| C_RIGHT -> C_RIGHT
|
||||
(* Set *)
|
||||
| C_SET_EMPTY -> C_SET_EMPTY
|
||||
| C_SET_LITERAL -> C_SET_LITERAL
|
||||
| C_SET_ADD -> C_SET_ADD
|
||||
| C_SET_REMOVE -> C_SET_REMOVE
|
||||
| C_SET_ITER -> C_SET_ITER
|
||||
| C_SET_FOLD -> C_SET_FOLD
|
||||
| C_SET_MEM -> C_SET_MEM
|
||||
(* List *)
|
||||
| C_LIST_EMPTY -> C_LIST_EMPTY
|
||||
| C_LIST_LITERAL -> C_LIST_LITERAL
|
||||
| C_LIST_ITER -> C_LIST_ITER
|
||||
| C_LIST_MAP -> C_LIST_MAP
|
||||
| C_LIST_FOLD -> C_LIST_FOLD
|
||||
(* Maps *)
|
||||
| C_MAP -> C_MAP
|
||||
| C_MAP_EMPTY -> C_MAP_EMPTY
|
||||
| C_MAP_LITERAL -> C_MAP_LITERAL
|
||||
| C_MAP_GET -> C_MAP_GET
|
||||
| C_MAP_GET_FORCE -> C_MAP_GET_FORCE
|
||||
| C_MAP_ADD -> C_MAP_ADD
|
||||
| C_MAP_REMOVE -> C_MAP_REMOVE
|
||||
| C_MAP_UPDATE -> C_MAP_UPDATE
|
||||
| C_MAP_ITER -> C_MAP_ITER
|
||||
| C_MAP_MAP -> C_MAP_MAP
|
||||
| C_MAP_FOLD -> C_MAP_FOLD
|
||||
| C_MAP_MEM -> C_MAP_MEM
|
||||
| C_MAP_FIND -> C_MAP_FIND
|
||||
| C_MAP_FIND_OPT -> C_MAP_FIND_OPT
|
||||
(* Big Maps *)
|
||||
| C_BIG_MAP -> C_BIG_MAP
|
||||
| C_BIG_MAP_EMPTY -> C_BIG_MAP_EMPTY
|
||||
| C_BIG_MAP_LITERAL -> C_BIG_MAP_LITERAL
|
||||
(* Crypto *)
|
||||
| C_SHA256 -> C_SHA256
|
||||
| C_SHA512 -> C_SHA512
|
||||
| C_BLAKE2b -> C_BLAKE2b
|
||||
| C_HASH -> C_HASH
|
||||
| C_HASH_KEY -> C_HASH_KEY
|
||||
| C_CHECK_SIGNATURE -> C_CHECK_SIGNATURE
|
||||
| C_CHAIN_ID -> C_CHAIN_ID
|
||||
(* Blockchain *)
|
||||
| C_CALL -> C_CALL
|
||||
| C_CONTRACT -> C_CONTRACT
|
||||
| C_CONTRACT_OPT -> C_CONTRACT_OPT
|
||||
| C_CONTRACT_ENTRYPOINT -> C_CONTRACT_ENTRYPOINT
|
||||
| C_CONTRACT_ENTRYPOINT_OPT -> C_CONTRACT_ENTRYPOINT_OPT
|
||||
| C_AMOUNT -> C_AMOUNT
|
||||
| C_BALANCE -> C_BALANCE
|
||||
| C_SOURCE -> C_SOURCE
|
||||
| C_SENDER -> C_SENDER
|
||||
| C_ADDRESS -> C_ADDRESS
|
||||
| C_SELF -> C_SELF
|
||||
| C_SELF_ADDRESS -> C_SELF_ADDRESS
|
||||
| C_IMPLICIT_ACCOUNT -> C_IMPLICIT_ACCOUNT
|
||||
| C_SET_DELEGATE -> C_SET_DELEGATE
|
||||
| C_CREATE_CONTRACT -> C_CREATE_CONTRACT
|
||||
|
||||
let unconvert_constant' : O.constant' -> I.constant' = function
|
||||
| C_INT -> C_INT
|
||||
| C_UNIT -> C_UNIT
|
||||
| C_NIL -> C_NIL
|
||||
| C_NOW -> C_NOW
|
||||
| C_IS_NAT -> C_IS_NAT
|
||||
| C_SOME -> C_SOME
|
||||
| C_NONE -> C_NONE
|
||||
| C_ASSERTION -> C_ASSERTION
|
||||
| C_ASSERT_INFERRED -> C_ASSERT_INFERRED
|
||||
| C_FAILWITH -> C_FAILWITH
|
||||
| C_UPDATE -> C_UPDATE
|
||||
(* Loops *)
|
||||
| C_ITER -> C_ITER
|
||||
| C_FOLD_WHILE -> C_FOLD_WHILE
|
||||
| C_FOLD_CONTINUE -> C_FOLD_CONTINUE
|
||||
| C_FOLD_STOP -> C_FOLD_STOP
|
||||
| C_LOOP_LEFT -> C_LOOP_LEFT
|
||||
| C_LOOP_CONTINUE -> C_LOOP_CONTINUE
|
||||
| C_LOOP_STOP -> C_LOOP_STOP
|
||||
| C_FOLD -> C_FOLD
|
||||
(* MATH *)
|
||||
| C_NEG -> C_NEG
|
||||
| C_ABS -> C_ABS
|
||||
| C_ADD -> C_ADD
|
||||
| C_SUB -> C_SUB
|
||||
| C_MUL -> C_MUL
|
||||
| C_EDIV -> C_EDIV
|
||||
| C_DIV -> C_DIV
|
||||
| C_MOD -> C_MOD
|
||||
(* LOGIC *)
|
||||
| C_NOT -> C_NOT
|
||||
| C_AND -> C_AND
|
||||
| C_OR -> C_OR
|
||||
| C_XOR -> C_XOR
|
||||
| C_LSL -> C_LSL
|
||||
| C_LSR -> C_LSR
|
||||
(* COMPARATOR *)
|
||||
| C_EQ -> C_EQ
|
||||
| C_NEQ -> C_NEQ
|
||||
| C_LT -> C_LT
|
||||
| C_GT -> C_GT
|
||||
| C_LE -> C_LE
|
||||
| C_GE -> C_GE
|
||||
(* Bytes/ String *)
|
||||
| C_SIZE -> C_SIZE
|
||||
| C_CONCAT -> C_CONCAT
|
||||
| C_SLICE -> C_SLICE
|
||||
| C_BYTES_PACK -> C_BYTES_PACK
|
||||
| C_BYTES_UNPACK -> C_BYTES_UNPACK
|
||||
| C_CONS -> C_CONS
|
||||
(* Pair *)
|
||||
| C_PAIR -> C_PAIR
|
||||
| C_CAR -> C_CAR
|
||||
| C_CDR -> C_CDR
|
||||
| C_LEFT -> C_LEFT
|
||||
| C_RIGHT -> C_RIGHT
|
||||
(* Set *)
|
||||
| C_SET_EMPTY -> C_SET_EMPTY
|
||||
| C_SET_LITERAL -> C_SET_LITERAL
|
||||
| C_SET_ADD -> C_SET_ADD
|
||||
| C_SET_REMOVE -> C_SET_REMOVE
|
||||
| C_SET_ITER -> C_SET_ITER
|
||||
| C_SET_FOLD -> C_SET_FOLD
|
||||
| C_SET_MEM -> C_SET_MEM
|
||||
(* List *)
|
||||
| C_LIST_EMPTY -> C_LIST_EMPTY
|
||||
| C_LIST_LITERAL -> C_LIST_LITERAL
|
||||
| C_LIST_ITER -> C_LIST_ITER
|
||||
| C_LIST_MAP -> C_LIST_MAP
|
||||
| C_LIST_FOLD -> C_LIST_FOLD
|
||||
(* Maps *)
|
||||
| C_MAP -> C_MAP
|
||||
| C_MAP_EMPTY -> C_MAP_EMPTY
|
||||
| C_MAP_LITERAL -> C_MAP_LITERAL
|
||||
| C_MAP_GET -> C_MAP_GET
|
||||
| C_MAP_GET_FORCE -> C_MAP_GET_FORCE
|
||||
| C_MAP_ADD -> C_MAP_ADD
|
||||
| C_MAP_REMOVE -> C_MAP_REMOVE
|
||||
| C_MAP_UPDATE -> C_MAP_UPDATE
|
||||
| C_MAP_ITER -> C_MAP_ITER
|
||||
| C_MAP_MAP -> C_MAP_MAP
|
||||
| C_MAP_FOLD -> C_MAP_FOLD
|
||||
| C_MAP_MEM -> C_MAP_MEM
|
||||
| C_MAP_FIND -> C_MAP_FIND
|
||||
| C_MAP_FIND_OPT -> C_MAP_FIND_OPT
|
||||
(* Big Maps *)
|
||||
| C_BIG_MAP -> C_BIG_MAP
|
||||
| C_BIG_MAP_EMPTY -> C_BIG_MAP_EMPTY
|
||||
| C_BIG_MAP_LITERAL -> C_BIG_MAP_LITERAL
|
||||
(* Crypto *)
|
||||
| C_SHA256 -> C_SHA256
|
||||
| C_SHA512 -> C_SHA512
|
||||
| C_BLAKE2b -> C_BLAKE2b
|
||||
| C_HASH -> C_HASH
|
||||
| C_HASH_KEY -> C_HASH_KEY
|
||||
| C_CHECK_SIGNATURE -> C_CHECK_SIGNATURE
|
||||
| C_CHAIN_ID -> C_CHAIN_ID
|
||||
(* Blockchain *)
|
||||
| C_CALL -> C_CALL
|
||||
| C_CONTRACT -> C_CONTRACT
|
||||
| C_CONTRACT_OPT -> C_CONTRACT_OPT
|
||||
| C_CONTRACT_ENTRYPOINT -> C_CONTRACT_ENTRYPOINT
|
||||
| C_CONTRACT_ENTRYPOINT_OPT -> C_CONTRACT_ENTRYPOINT_OPT
|
||||
| C_AMOUNT -> C_AMOUNT
|
||||
| C_BALANCE -> C_BALANCE
|
||||
| C_SOURCE -> C_SOURCE
|
||||
| C_SENDER -> C_SENDER
|
||||
| C_ADDRESS -> C_ADDRESS
|
||||
| C_SELF -> C_SELF
|
||||
| C_SELF_ADDRESS -> C_SELF_ADDRESS
|
||||
| C_IMPLICIT_ACCOUNT -> C_IMPLICIT_ACCOUNT
|
||||
| C_SET_DELEGATE -> C_SET_DELEGATE
|
||||
| C_CREATE_CONTRACT -> C_CREATE_CONTRACT
|
||||
|
||||
(*
|
||||
let rec type_program (p:I.program) : O.program result =
|
||||
let aux (e, acc:(environment * O.declaration Location.wrap list)) (d:I.declaration Location.wrap) =
|
||||
@ -187,16 +455,16 @@ let rec type_declaration env state : I.declaration -> (environment * Solver.stat
|
||||
let%bind tv = evaluate_type env type_expression in
|
||||
let env' = Environment.add_type (type_name) tv env in
|
||||
ok (env', state , None)
|
||||
| Declaration_constant (name , tv_opt , inline, expression) -> (
|
||||
| Declaration_constant (binder , tv_opt , inline, expression) -> (
|
||||
(*
|
||||
Determine the type of the expression and add it to the environment
|
||||
*)
|
||||
let%bind tv'_opt = bind_map_option (evaluate_type env) tv_opt in
|
||||
let%bind (ae' , state') =
|
||||
trace (constant_declaration_error name expression tv'_opt) @@
|
||||
let%bind (expr , state') =
|
||||
trace (constant_declaration_error binder expression tv'_opt) @@
|
||||
type_expression env state expression in
|
||||
let env' = Environment.add_ez_ae name ae' env in
|
||||
ok (env', state' , Some (O.Declaration_constant (name, ae', inline, env') ))
|
||||
let post_env = Environment.add_ez_ae binder expr env in
|
||||
ok (post_env, state' , Some (O.Declaration_constant { binder ; expr ; inline ; post_env} ))
|
||||
)
|
||||
|
||||
and type_match : environment -> Solver.state -> O.type_expression -> I.matching_expr -> I.expression -> Location.t -> (O.matching_expr * Solver.state) result =
|
||||
@ -209,14 +477,14 @@ and type_match : environment -> Solver.state -> O.type_expression -> I.matching_
|
||||
let%bind (match_false , state'') = type_expression e state' match_false in
|
||||
ok (O.Match_bool {match_true ; match_false} , state'')
|
||||
| Match_option {match_none ; match_some} ->
|
||||
let%bind t_opt =
|
||||
let%bind tv =
|
||||
trace_strong (match_error ~expected:i ~actual:t loc)
|
||||
@@ get_t_option t in
|
||||
let%bind (match_none , state') = type_expression e state match_none in
|
||||
let (n, b, _) = match_some in
|
||||
let e' = Environment.add_ez_binder n t_opt e in
|
||||
let%bind (b' , state'') = type_expression e' state' b in
|
||||
ok (O.Match_option {match_none ; match_some = (n, b', t_opt)} , state'')
|
||||
let (opt, b, _) = match_some in
|
||||
let e' = Environment.add_ez_binder opt tv e in
|
||||
let%bind (body , state'') = type_expression e' state' b in
|
||||
ok (O.Match_option {match_none ; match_some = { opt; body; tv}} , state'')
|
||||
| Match_list {match_nil ; match_cons} ->
|
||||
let%bind t_elt =
|
||||
trace_strong (match_error ~expected:i ~actual:t loc)
|
||||
@ -225,19 +493,19 @@ and type_match : environment -> Solver.state -> O.type_expression -> I.matching_
|
||||
let (hd, tl, b, _) = match_cons in
|
||||
let e' = Environment.add_ez_binder hd t_elt e in
|
||||
let e' = Environment.add_ez_binder tl t e' in
|
||||
let%bind (b' , state'') = type_expression e' state' b in
|
||||
ok (O.Match_list {match_nil ; match_cons = (hd, tl, b',t)} , state'')
|
||||
| Match_tuple ((lst, b),_) ->
|
||||
let%bind t_tuple =
|
||||
let%bind (body , state'') = type_expression e' state' b in
|
||||
ok (O.Match_list {match_nil ; match_cons = {hd; tl; body;tv=t}} , state'')
|
||||
| Match_tuple ((vars, b),_) ->
|
||||
let%bind tvs =
|
||||
trace_strong (match_error ~expected:i ~actual:t loc)
|
||||
@@ get_t_tuple t in
|
||||
let%bind lst' =
|
||||
generic_try (match_tuple_wrong_arity t_tuple lst loc)
|
||||
@@ (fun () -> List.combine lst t_tuple) in
|
||||
generic_try (match_tuple_wrong_arity tvs vars loc)
|
||||
@@ (fun () -> List.combine vars tvs) in
|
||||
let aux prev (name, tv) = Environment.add_ez_binder name tv prev in
|
||||
let e' = List.fold_left aux e lst' in
|
||||
let%bind (b' , state') = type_expression e' state b in
|
||||
ok (O.Match_tuple ((lst, b'), t_tuple) , state')
|
||||
let%bind (body , state') = type_expression e' state b in
|
||||
ok (O.Match_tuple {vars ; body ; tvs} , state')
|
||||
| Match_variant (lst,_) ->
|
||||
let%bind variant_opt =
|
||||
let aux acc ((constructor_name , _) , _) =
|
||||
@ -267,8 +535,8 @@ and type_match : environment -> Solver.state -> O.type_expression -> I.matching_
|
||||
let%bind variant_cases' =
|
||||
trace (match_error ~expected:i ~actual:t loc)
|
||||
@@ Ast_typed.Combinators.get_t_sum variant in
|
||||
let variant_cases = List.map fst @@ I.CMap.to_kv_list variant_cases' in
|
||||
let match_cases = List.map (Function.compose fst fst) lst in
|
||||
let variant_cases = List.map fst @@ O.CMap.to_kv_list variant_cases' in
|
||||
let match_cases = List.map (fun x -> convert_constructor' @@ fst @@ fst x) lst in
|
||||
let test_case = fun c ->
|
||||
Assert.assert_true (List.mem c match_cases)
|
||||
in
|
||||
@ -280,17 +548,18 @@ and type_match : environment -> Solver.state -> O.type_expression -> I.matching_
|
||||
Assert.assert_true List.(length variant_cases = length match_cases) in
|
||||
ok ()
|
||||
in
|
||||
let%bind (state'' , lst') =
|
||||
let aux state ((constructor_name , name) , b) =
|
||||
let%bind (state'' , cases) =
|
||||
let aux state ((constructor_name , pattern) , b) =
|
||||
let%bind (constructor , _) =
|
||||
trace_option (unbound_constructor e constructor_name loc) @@
|
||||
Environment.get_constructor constructor_name e in
|
||||
let e' = Environment.add_ez_binder name constructor e in
|
||||
let%bind (b' , state') = type_expression e' state b in
|
||||
ok (state' , ((constructor_name , name) , b'))
|
||||
let e' = Environment.add_ez_binder pattern constructor e in
|
||||
let%bind (body , state') = type_expression e' state b in
|
||||
let constructor = convert_constructor' constructor_name in
|
||||
ok (state' , ({constructor ; pattern ; body = body} : O.matching_content_case))
|
||||
in
|
||||
bind_fold_map_list aux state lst in
|
||||
ok (O.Match_variant (lst' , variant) , state'')
|
||||
ok (O.Match_variant {cases ; tv=variant } , state'')
|
||||
|
||||
(*
|
||||
Recursively search the type_expression and return a result containing the
|
||||
@ -307,17 +576,17 @@ and evaluate_type (e:environment) (t:I.type_expression) : O.type_expression resu
|
||||
let aux k v prev =
|
||||
let%bind prev' = prev in
|
||||
let%bind v' = evaluate_type e v in
|
||||
ok @@ I.CMap.add k v' prev'
|
||||
ok @@ O.CMap.add (convert_constructor' k) v' prev'
|
||||
in
|
||||
let%bind m = I.CMap.fold aux m (ok I.CMap.empty) in
|
||||
let%bind m = I.CMap.fold aux m (ok O.CMap.empty) in
|
||||
return (T_sum m)
|
||||
| T_record m ->
|
||||
let aux k v prev =
|
||||
let%bind prev' = prev in
|
||||
let%bind v' = evaluate_type e v in
|
||||
ok @@ I.LMap.add k v' prev'
|
||||
ok @@ O.LMap.add (convert_label k) v' prev'
|
||||
in
|
||||
let%bind m = I.LMap.fold aux m (ok I.LMap.empty) in
|
||||
let%bind m = I.LMap.fold aux m (ok O.LMap.empty) in
|
||||
return (T_record m)
|
||||
| T_variable name ->
|
||||
let%bind tv =
|
||||
@ -325,7 +594,7 @@ and evaluate_type (e:environment) (t:I.type_expression) : O.type_expression resu
|
||||
@@ Environment.get_type_opt (name) e in
|
||||
ok tv
|
||||
| T_constant cst ->
|
||||
return (T_constant cst)
|
||||
return (T_constant (convert_type_constant cst))
|
||||
| T_operator opt ->
|
||||
let%bind opt = match opt with
|
||||
| TC_set s ->
|
||||
@ -340,26 +609,26 @@ and evaluate_type (e:environment) (t:I.type_expression) : O.type_expression resu
|
||||
| TC_map (k,v) ->
|
||||
let%bind k = evaluate_type e k in
|
||||
let%bind v = evaluate_type e v in
|
||||
ok @@ O.TC_map (k,v)
|
||||
ok @@ O.TC_map {k;v}
|
||||
| TC_big_map (k,v) ->
|
||||
let%bind k = evaluate_type e k in
|
||||
let%bind v = evaluate_type e v in
|
||||
ok @@ O.TC_big_map (k,v)
|
||||
ok @@ O.TC_big_map {k;v}
|
||||
| TC_map_or_big_map (k,v) ->
|
||||
let%bind k = evaluate_type e k in
|
||||
let%bind v = evaluate_type e v in
|
||||
ok @@ O.TC_map_or_big_map (k,v)
|
||||
ok @@ O.TC_map_or_big_map {k;v}
|
||||
| TC_michelson_or (l,r) ->
|
||||
let%bind l = evaluate_type e l in
|
||||
let%bind r = evaluate_type e r in
|
||||
ok @@ O.TC_michelson_or (l,r)
|
||||
ok @@ O.TC_michelson_or {l;r}
|
||||
| TC_contract c ->
|
||||
let%bind c = evaluate_type e c in
|
||||
ok @@ O.TC_contract c
|
||||
| TC_arrow ( arg , ret ) ->
|
||||
let%bind arg' = evaluate_type e arg in
|
||||
let%bind ret' = evaluate_type e ret in
|
||||
ok @@ O.TC_arrow ( arg' , ret' )
|
||||
ok @@ O.TC_arrow { type1=arg' ; type2=ret' }
|
||||
in
|
||||
return (T_operator (opt))
|
||||
|
||||
@ -461,6 +730,7 @@ and type_expression : environment -> Solver.state -> ?tv_opt:O.type_expression -
|
||||
* ) *)
|
||||
| E_record_accessor {record;path} -> (
|
||||
let%bind (base' , state') = type_expression e state record in
|
||||
let path = convert_label path in
|
||||
let wrapped = Wrap.access_label ~base:base'.type_expression ~label:path in
|
||||
return_wrapped (E_record_accessor {record=base';path}) state' wrapped
|
||||
)
|
||||
@ -481,28 +751,30 @@ and type_expression : environment -> Solver.state -> ?tv_opt:O.type_expression -
|
||||
let%bind (expr' , state') = type_expression e state element in
|
||||
let%bind _assert = O.assert_type_expression_eq (expr'.type_expression, c_tv) in
|
||||
let wrapped = Wrap.constructor expr'.type_expression c_tv sum_tv in
|
||||
let constructor = convert_constructor' constructor in
|
||||
return_wrapped (E_constructor {constructor; element=expr'}) state' wrapped
|
||||
|
||||
(* Record *)
|
||||
| E_record m ->
|
||||
let aux (acc, state) k expr =
|
||||
let%bind (expr' , state') = type_expression e state expr in
|
||||
ok (I.LMap.add k expr' acc , state')
|
||||
ok (O.LMap.add (convert_label k) expr' acc , state')
|
||||
in
|
||||
let%bind (m' , state') = Stage_common.Helpers.bind_fold_lmap aux (ok (I.LMap.empty , state)) m in
|
||||
let wrapped = Wrap.record (I.LMap.map get_type_expression m') in
|
||||
let%bind (m' , state') = Stage_common.Helpers.bind_fold_lmap aux (ok (O.LMap.empty , state)) m in
|
||||
let wrapped = Wrap.record (O.LMap.map get_type_expression m') in
|
||||
return_wrapped (E_record m') state' wrapped
|
||||
| E_record_update {record; path; update} ->
|
||||
let%bind (record, state) = type_expression e state record in
|
||||
let%bind (update,state) = type_expression e state update in
|
||||
let wrapped = get_type_expression record in
|
||||
let path = convert_label path in
|
||||
let%bind (wrapped,tv) =
|
||||
match wrapped.type_content with
|
||||
| T_record record -> (
|
||||
let field_op = I.LMap.find_opt path record in
|
||||
let field_op = O.LMap.find_opt path record in
|
||||
match field_op with
|
||||
| Some tv -> ok (record,tv)
|
||||
| None -> failwith @@ Format.asprintf "field %a is not part of record" Stage_common.PP.label path
|
||||
| None -> failwith @@ Format.asprintf "field %a is not part of record" O.PP.label path
|
||||
)
|
||||
| _ -> failwith "Update an expression which is not a record"
|
||||
in
|
||||
@ -510,6 +782,7 @@ and type_expression : environment -> Solver.state -> ?tv_opt:O.type_expression -
|
||||
return_wrapped (E_record_update {record; path; update}) state (Wrap.record wrapped)
|
||||
(* Data-structure *)
|
||||
|
||||
|
||||
(* | E_lambda {
|
||||
* binder ;
|
||||
* input_type ;
|
||||
@ -558,7 +831,6 @@ and type_expression : environment -> Solver.state -> ?tv_opt:O.type_expression -
|
||||
let wrapped = Wrap.application f'.type_expression args.type_expression in
|
||||
return_wrapped (E_application {lamb=f';args}) state'' wrapped
|
||||
|
||||
|
||||
(* Advanced *)
|
||||
(* | E_matching (ex, m) -> (
|
||||
* let%bind ex' = type_expression e ex in
|
||||
@ -608,13 +880,13 @@ and type_expression : environment -> Solver.state -> ?tv_opt:O.type_expression -
|
||||
let%bind (ex' , state') = type_expression e state matchee in
|
||||
let%bind (m' , state'') = type_match e state' ex'.type_expression cases ae ae.location in
|
||||
let tvs =
|
||||
let aux (cur:(O.expression, O.type_expression) O.matching_content) =
|
||||
let aux (cur : O.matching_expr) =
|
||||
match cur with
|
||||
| Match_bool { match_true ; match_false } -> [ match_true ; match_false ]
|
||||
| Match_list { match_nil ; match_cons = (_ , _ , match_cons, _) } -> [ match_nil ; match_cons ]
|
||||
| Match_option { match_none ; match_some = (_ , match_some, _) } -> [ match_none ; match_some ]
|
||||
| Match_tuple ((_ , match_tuple), _) -> [ match_tuple ]
|
||||
| Match_variant (lst , _) -> List.map snd lst in
|
||||
| Match_list { match_nil ; match_cons = { hd=_ ; tl=_ ; body ; tv=_} } -> [ match_nil ; body ]
|
||||
| Match_option { match_none ; match_some = {opt=_; body; tv=_} } -> [ match_none ; body ]
|
||||
| Match_tuple { vars=_ ; body ; tvs=_ } -> [ body ]
|
||||
| Match_variant { cases ; tv=_ } -> List.map (fun ({constructor=_; pattern=_; body} : O.matching_content_case) -> body) cases in
|
||||
List.map get_type_expression @@ aux m' in
|
||||
let%bind () = match tvs with
|
||||
[] -> fail @@ match_empty_variant cases ae.location
|
||||
@ -667,7 +939,7 @@ and type_expression : environment -> Solver.state -> ?tv_opt:O.type_expression -
|
||||
return_wrapped (E_recursive {fun_name;fun_type;lambda}) state wrapped
|
||||
|
||||
| E_constant {cons_name=name; arguments=lst} ->
|
||||
let () = ignore (name , lst) in
|
||||
let name = convert_constant' name in
|
||||
let%bind t = Operators.Typer.Operators_types.constant_type name in
|
||||
let aux acc expr =
|
||||
let (lst , state) = acc in
|
||||
@ -705,6 +977,7 @@ and type_lambda e state {
|
||||
(* Advanced *)
|
||||
|
||||
and type_constant (name:I.constant') (lst:O.type_expression list) (tv_opt:O.type_expression option) : (O.constant' * O.type_expression) result =
|
||||
let name = convert_constant' name in
|
||||
let%bind typer = Operators.Typer.constant_typers name in
|
||||
let%bind tv = typer lst tv_opt in
|
||||
ok(name, tv)
|
||||
@ -814,13 +1087,21 @@ let rec untype_type_expression (t:O.type_expression) : (I.type_expression) resul
|
||||
(* TODO: or should we use t.core if present? *)
|
||||
let%bind t = match t.type_content with
|
||||
| O.T_sum x ->
|
||||
let%bind x' = Stage_common.Helpers.bind_map_cmap untype_type_expression x in
|
||||
let aux k v acc =
|
||||
let%bind acc = acc in
|
||||
let%bind v' = untype_type_expression v in
|
||||
ok @@ I.CMap.add (unconvert_constructor' k) v' acc in
|
||||
let%bind x' = O.CMap.fold aux x (ok I.CMap.empty) in
|
||||
ok @@ I.T_sum x'
|
||||
| O.T_record x ->
|
||||
let%bind x' = Stage_common.Helpers.bind_map_lmap untype_type_expression x in
|
||||
let aux k v acc =
|
||||
let%bind acc = acc in
|
||||
let%bind v' = untype_type_expression v in
|
||||
ok @@ I.LMap.add (unconvert_label k) v' acc in
|
||||
let%bind x' = O.LMap.fold aux x (ok I.LMap.empty) in
|
||||
ok @@ I.T_record x'
|
||||
| O.T_constant (tag) ->
|
||||
ok @@ I.T_constant (tag)
|
||||
ok @@ I.T_constant (unconvert_type_constant tag)
|
||||
| O.T_variable (name) -> ok @@ I.T_variable (name) (* TODO: is this the right conversion? *)
|
||||
| O.T_arrow {type1;type2} ->
|
||||
let%bind type1 = untype_type_expression type1 in
|
||||
@ -837,23 +1118,23 @@ let rec untype_type_expression (t:O.type_expression) : (I.type_expression) resul
|
||||
| O.TC_set t ->
|
||||
let%bind t' = untype_type_expression t in
|
||||
ok @@ I.TC_set t'
|
||||
| O.TC_map (k,v) ->
|
||||
| O.TC_map {k;v} ->
|
||||
let%bind k = untype_type_expression k in
|
||||
let%bind v = untype_type_expression v in
|
||||
ok @@ I.TC_map (k,v)
|
||||
| O.TC_big_map (k,v) ->
|
||||
| O.TC_big_map {k;v} ->
|
||||
let%bind k = untype_type_expression k in
|
||||
let%bind v = untype_type_expression v in
|
||||
ok @@ I.TC_big_map (k,v)
|
||||
| O.TC_map_or_big_map (k,v) ->
|
||||
| O.TC_map_or_big_map {k;v} ->
|
||||
let%bind k = untype_type_expression k in
|
||||
let%bind v = untype_type_expression v in
|
||||
ok @@ I.TC_map_or_big_map (k,v)
|
||||
| O.TC_michelson_or (l,r) ->
|
||||
| O.TC_michelson_or {l;r} ->
|
||||
let%bind l = untype_type_expression l in
|
||||
let%bind r = untype_type_expression r in
|
||||
ok @@ I.TC_michelson_or (l,r)
|
||||
| O.TC_arrow ( arg , ret ) ->
|
||||
| O.TC_arrow { type1=arg ; type2=ret } ->
|
||||
let%bind arg' = untype_type_expression arg in
|
||||
let%bind ret' = untype_type_expression ret in
|
||||
ok @@ I.TC_arrow ( arg' , ret' )
|
||||
@ -904,7 +1185,7 @@ let rec untype_expression (e:O.expression) : (I.expression) result =
|
||||
return (e_literal l)
|
||||
| E_constant {cons_name;arguments} ->
|
||||
let%bind lst' = bind_map_list untype_expression arguments in
|
||||
return (e_constant cons_name lst')
|
||||
return (e_constant (unconvert_constant' cons_name) lst')
|
||||
| E_variable (n) ->
|
||||
return (e_variable (n))
|
||||
| E_application {lamb;args} ->
|
||||
@ -920,8 +1201,8 @@ let rec untype_expression (e:O.expression) : (I.expression) result =
|
||||
let Constructor n = constructor in
|
||||
return (e_constructor n p')
|
||||
| E_record r ->
|
||||
let r = LMap.to_kv_list r in
|
||||
let%bind r' = bind_map_list (fun (k,e) -> let%bind e = untype_expression e in ok (k,e)) r in
|
||||
let r = O.LMap.to_kv_list r in
|
||||
let%bind r' = bind_map_list (fun (O.Label k,e) -> let%bind e = untype_expression e in ok (I.Label k,e)) r in
|
||||
return (e_record @@ LMap.of_list r')
|
||||
| E_record_accessor {record; path} ->
|
||||
let%bind r' = untype_expression record in
|
||||
@ -930,7 +1211,7 @@ let rec untype_expression (e:O.expression) : (I.expression) result =
|
||||
| E_record_update {record; path; update} ->
|
||||
let%bind r' = untype_expression record in
|
||||
let%bind e = untype_expression update in
|
||||
return (e_record_update r' path e)
|
||||
return (e_record_update r' (unconvert_label path) e)
|
||||
| E_matching {matchee;cases} ->
|
||||
let%bind ae' = untype_expression matchee in
|
||||
let%bind m' = untype_matching untype_expression cases in
|
||||
@ -964,22 +1245,22 @@ and untype_matching : (O.expression -> I.expression result) -> O.matching_expr -
|
||||
let%bind match_true = f match_true in
|
||||
let%bind match_false = f match_false in
|
||||
ok @@ Match_bool {match_true ; match_false}
|
||||
| Match_tuple ((lst, b),_) ->
|
||||
let%bind b = f b in
|
||||
ok @@ I.Match_tuple ((lst, b),[])
|
||||
| Match_option {match_none ; match_some = (v, some,_)} ->
|
||||
| Match_tuple { vars ; body ; tvs=_ } ->
|
||||
let%bind b = f body in
|
||||
ok @@ I.Match_tuple ((vars, b),[])
|
||||
| Match_option {match_none ; match_some = {opt; body;tv=_}} ->
|
||||
let%bind match_none = f match_none in
|
||||
let%bind some = f some in
|
||||
let match_some = v, some, () in
|
||||
let%bind some = f body in
|
||||
let match_some = opt, some, () in
|
||||
ok @@ Match_option {match_none ; match_some}
|
||||
| Match_list {match_nil ; match_cons = (hd_name, tl_name, cons,_)} ->
|
||||
| Match_list {match_nil ; match_cons = {hd;tl;body;tv=_}} ->
|
||||
let%bind match_nil = f match_nil in
|
||||
let%bind cons = f cons in
|
||||
let match_cons = hd_name , tl_name , cons, () in
|
||||
let%bind cons = f body in
|
||||
let match_cons = hd , tl , cons, () in
|
||||
ok @@ Match_list {match_nil ; match_cons}
|
||||
| Match_variant (lst , _) ->
|
||||
let aux ((a,b),c) =
|
||||
let%bind c' = f c in
|
||||
ok ((a,b),c') in
|
||||
let%bind lst' = bind_map_list aux lst in
|
||||
| Match_variant { cases ; tv=_ } ->
|
||||
let aux ({constructor;pattern;body} : O.matching_content_case) =
|
||||
let%bind body = f body in
|
||||
ok ((unconvert_constructor' constructor,pattern),body) in
|
||||
let%bind lst' = bind_map_list aux cases in
|
||||
ok @@ Match_variant (lst',())
|
||||
|
@ -217,6 +217,256 @@ module Errors = struct
end
open Errors

let convert_constructor' (I.Constructor c) = O.Constructor c
let unconvert_constructor' (O.Constructor c) = I.Constructor c
let convert_label (I.Label c) = O.Label c
let convert_type_constant : I.type_constant -> O.type_constant = function
| TC_unit -> TC_unit
| TC_string -> TC_string
| TC_bytes -> TC_bytes
| TC_nat -> TC_nat
| TC_int -> TC_int
| TC_mutez -> TC_mutez
| TC_bool -> TC_bool
| TC_operation -> TC_operation
| TC_address -> TC_address
| TC_key -> TC_key
| TC_key_hash -> TC_key_hash
| TC_chain_id -> TC_chain_id
| TC_signature -> TC_signature
| TC_timestamp -> TC_timestamp
| TC_void -> TC_void

let convert_constant' : I.constant' -> O.constant' = function
|
||||
| C_INT -> C_INT
|
||||
| C_UNIT -> C_UNIT
|
||||
| C_NIL -> C_NIL
|
||||
| C_NOW -> C_NOW
|
||||
| C_IS_NAT -> C_IS_NAT
|
||||
| C_SOME -> C_SOME
|
||||
| C_NONE -> C_NONE
|
||||
| C_ASSERTION -> C_ASSERTION
|
||||
| C_ASSERT_INFERRED -> C_ASSERT_INFERRED
|
||||
| C_FAILWITH -> C_FAILWITH
|
||||
| C_UPDATE -> C_UPDATE
|
||||
(* Loops *)
|
||||
| C_ITER -> C_ITER
|
||||
| C_FOLD_WHILE -> C_FOLD_WHILE
|
||||
| C_FOLD_CONTINUE -> C_FOLD_CONTINUE
|
||||
| C_FOLD_STOP -> C_FOLD_STOP
|
||||
| C_LOOP_LEFT -> C_LOOP_LEFT
|
||||
| C_LOOP_CONTINUE -> C_LOOP_CONTINUE
|
||||
| C_LOOP_STOP -> C_LOOP_STOP
|
||||
| C_FOLD -> C_FOLD
|
||||
(* MATH *)
|
||||
| C_NEG -> C_NEG
|
||||
| C_ABS -> C_ABS
|
||||
| C_ADD -> C_ADD
|
||||
| C_SUB -> C_SUB
|
||||
| C_MUL -> C_MUL
|
||||
| C_EDIV -> C_EDIV
|
||||
| C_DIV -> C_DIV
|
||||
| C_MOD -> C_MOD
|
||||
(* LOGIC *)
|
||||
| C_NOT -> C_NOT
|
||||
| C_AND -> C_AND
|
||||
| C_OR -> C_OR
|
||||
| C_XOR -> C_XOR
|
||||
| C_LSL -> C_LSL
|
||||
| C_LSR -> C_LSR
|
||||
(* COMPARATOR *)
|
||||
| C_EQ -> C_EQ
|
||||
| C_NEQ -> C_NEQ
|
||||
| C_LT -> C_LT
|
||||
| C_GT -> C_GT
|
||||
| C_LE -> C_LE
|
||||
| C_GE -> C_GE
|
||||
(* Bytes/ String *)
|
||||
| C_SIZE -> C_SIZE
|
||||
| C_CONCAT -> C_CONCAT
|
||||
| C_SLICE -> C_SLICE
|
||||
| C_BYTES_PACK -> C_BYTES_PACK
|
||||
| C_BYTES_UNPACK -> C_BYTES_UNPACK
|
||||
| C_CONS -> C_CONS
|
||||
(* Pair *)
|
||||
| C_PAIR -> C_PAIR
|
||||
| C_CAR -> C_CAR
|
||||
| C_CDR -> C_CDR
|
||||
| C_LEFT -> C_LEFT
|
||||
| C_RIGHT -> C_RIGHT
|
||||
(* Set *)
|
||||
| C_SET_EMPTY -> C_SET_EMPTY
|
||||
| C_SET_LITERAL -> C_SET_LITERAL
|
||||
| C_SET_ADD -> C_SET_ADD
|
||||
| C_SET_REMOVE -> C_SET_REMOVE
|
||||
| C_SET_ITER -> C_SET_ITER
|
||||
| C_SET_FOLD -> C_SET_FOLD
|
||||
| C_SET_MEM -> C_SET_MEM
|
||||
(* List *)
|
||||
| C_LIST_EMPTY -> C_LIST_EMPTY
|
||||
| C_LIST_LITERAL -> C_LIST_LITERAL
|
||||
| C_LIST_ITER -> C_LIST_ITER
|
||||
| C_LIST_MAP -> C_LIST_MAP
|
||||
| C_LIST_FOLD -> C_LIST_FOLD
|
||||
(* Maps *)
|
||||
| C_MAP -> C_MAP
|
||||
| C_MAP_EMPTY -> C_MAP_EMPTY
|
||||
| C_MAP_LITERAL -> C_MAP_LITERAL
|
||||
| C_MAP_GET -> C_MAP_GET
|
||||
| C_MAP_GET_FORCE -> C_MAP_GET_FORCE
|
||||
| C_MAP_ADD -> C_MAP_ADD
|
||||
| C_MAP_REMOVE -> C_MAP_REMOVE
|
||||
| C_MAP_UPDATE -> C_MAP_UPDATE
|
||||
| C_MAP_ITER -> C_MAP_ITER
|
||||
| C_MAP_MAP -> C_MAP_MAP
|
||||
| C_MAP_FOLD -> C_MAP_FOLD
|
||||
| C_MAP_MEM -> C_MAP_MEM
|
||||
| C_MAP_FIND -> C_MAP_FIND
|
||||
| C_MAP_FIND_OPT -> C_MAP_FIND_OPT
|
||||
(* Big Maps *)
|
||||
| C_BIG_MAP -> C_BIG_MAP
|
||||
| C_BIG_MAP_EMPTY -> C_BIG_MAP_EMPTY
|
||||
| C_BIG_MAP_LITERAL -> C_BIG_MAP_LITERAL
|
||||
(* Crypto *)
|
||||
| C_SHA256 -> C_SHA256
|
||||
| C_SHA512 -> C_SHA512
|
||||
| C_BLAKE2b -> C_BLAKE2b
|
||||
| C_HASH -> C_HASH
|
||||
| C_HASH_KEY -> C_HASH_KEY
|
||||
| C_CHECK_SIGNATURE -> C_CHECK_SIGNATURE
|
||||
| C_CHAIN_ID -> C_CHAIN_ID
|
||||
(* Blockchain *)
|
||||
| C_CALL -> C_CALL
|
||||
| C_CONTRACT -> C_CONTRACT
|
||||
| C_CONTRACT_OPT -> C_CONTRACT_OPT
|
||||
| C_CONTRACT_ENTRYPOINT -> C_CONTRACT_ENTRYPOINT
|
||||
| C_CONTRACT_ENTRYPOINT_OPT -> C_CONTRACT_ENTRYPOINT_OPT
|
||||
| C_AMOUNT -> C_AMOUNT
|
||||
| C_BALANCE -> C_BALANCE
|
||||
| C_SOURCE -> C_SOURCE
|
||||
| C_SENDER -> C_SENDER
|
||||
| C_ADDRESS -> C_ADDRESS
|
||||
| C_SELF -> C_SELF
|
||||
| C_SELF_ADDRESS -> C_SELF_ADDRESS
|
||||
| C_IMPLICIT_ACCOUNT -> C_IMPLICIT_ACCOUNT
|
||||
| C_SET_DELEGATE -> C_SET_DELEGATE
|
||||
| C_CREATE_CONTRACT -> C_CREATE_CONTRACT
|
||||
|
||||
let unconvert_constant' : O.constant' -> I.constant' = function
|
||||
| C_INT -> C_INT
|
||||
| C_UNIT -> C_UNIT
|
||||
| C_NIL -> C_NIL
|
||||
| C_NOW -> C_NOW
|
||||
| C_IS_NAT -> C_IS_NAT
|
||||
| C_SOME -> C_SOME
|
||||
| C_NONE -> C_NONE
|
||||
| C_ASSERTION -> C_ASSERTION
|
||||
| C_ASSERT_INFERRED -> C_ASSERT_INFERRED
|
||||
| C_FAILWITH -> C_FAILWITH
|
||||
| C_UPDATE -> C_UPDATE
|
||||
(* Loops *)
|
||||
| C_ITER -> C_ITER
|
||||
| C_FOLD_WHILE -> C_FOLD_WHILE
|
||||
| C_FOLD_CONTINUE -> C_FOLD_CONTINUE
|
||||
| C_FOLD_STOP -> C_FOLD_STOP
|
||||
| C_LOOP_LEFT -> C_LOOP_LEFT
|
||||
| C_LOOP_CONTINUE -> C_LOOP_CONTINUE
|
||||
| C_LOOP_STOP -> C_LOOP_STOP
|
||||
| C_FOLD -> C_FOLD
|
||||
(* MATH *)
|
||||
| C_NEG -> C_NEG
|
||||
| C_ABS -> C_ABS
|
||||
| C_ADD -> C_ADD
|
||||
| C_SUB -> C_SUB
|
||||
| C_MUL -> C_MUL
|
||||
| C_EDIV -> C_EDIV
|
||||
| C_DIV -> C_DIV
|
||||
| C_MOD -> C_MOD
|
||||
(* LOGIC *)
|
||||
| C_NOT -> C_NOT
|
||||
| C_AND -> C_AND
|
||||
| C_OR -> C_OR
|
||||
| C_XOR -> C_XOR
|
||||
| C_LSL -> C_LSL
|
||||
| C_LSR -> C_LSR
|
||||
(* COMPARATOR *)
|
||||
| C_EQ -> C_EQ
|
||||
| C_NEQ -> C_NEQ
|
||||
| C_LT -> C_LT
|
||||
| C_GT -> C_GT
|
||||
| C_LE -> C_LE
|
||||
| C_GE -> C_GE
|
||||
(* Bytes/ String *)
|
||||
| C_SIZE -> C_SIZE
|
||||
| C_CONCAT -> C_CONCAT
|
||||
| C_SLICE -> C_SLICE
|
||||
| C_BYTES_PACK -> C_BYTES_PACK
|
||||
| C_BYTES_UNPACK -> C_BYTES_UNPACK
|
||||
| C_CONS -> C_CONS
|
||||
(* Pair *)
|
||||
| C_PAIR -> C_PAIR
|
||||
| C_CAR -> C_CAR
|
||||
| C_CDR -> C_CDR
|
||||
| C_LEFT -> C_LEFT
|
||||
| C_RIGHT -> C_RIGHT
|
||||
(* Set *)
|
||||
| C_SET_EMPTY -> C_SET_EMPTY
|
||||
| C_SET_LITERAL -> C_SET_LITERAL
|
||||
| C_SET_ADD -> C_SET_ADD
|
||||
| C_SET_REMOVE -> C_SET_REMOVE
|
||||
| C_SET_ITER -> C_SET_ITER
|
||||
| C_SET_FOLD -> C_SET_FOLD
|
||||
| C_SET_MEM -> C_SET_MEM
|
||||
(* List *)
|
||||
| C_LIST_EMPTY -> C_LIST_EMPTY
|
||||
| C_LIST_LITERAL -> C_LIST_LITERAL
|
||||
| C_LIST_ITER -> C_LIST_ITER
|
||||
| C_LIST_MAP -> C_LIST_MAP
|
||||
| C_LIST_FOLD -> C_LIST_FOLD
|
||||
(* Maps *)
|
||||
| C_MAP -> C_MAP
|
||||
| C_MAP_EMPTY -> C_MAP_EMPTY
|
||||
| C_MAP_LITERAL -> C_MAP_LITERAL
|
||||
| C_MAP_GET -> C_MAP_GET
|
||||
| C_MAP_GET_FORCE -> C_MAP_GET_FORCE
|
||||
| C_MAP_ADD -> C_MAP_ADD
|
||||
| C_MAP_REMOVE -> C_MAP_REMOVE
|
||||
| C_MAP_UPDATE -> C_MAP_UPDATE
|
||||
| C_MAP_ITER -> C_MAP_ITER
|
||||
| C_MAP_MAP -> C_MAP_MAP
|
||||
| C_MAP_FOLD -> C_MAP_FOLD
|
||||
| C_MAP_MEM -> C_MAP_MEM
|
||||
| C_MAP_FIND -> C_MAP_FIND
|
||||
| C_MAP_FIND_OPT -> C_MAP_FIND_OPT
|
||||
(* Big Maps *)
|
||||
| C_BIG_MAP -> C_BIG_MAP
|
||||
| C_BIG_MAP_EMPTY -> C_BIG_MAP_EMPTY
|
||||
| C_BIG_MAP_LITERAL -> C_BIG_MAP_LITERAL
|
||||
(* Crypto *)
|
||||
| C_SHA256 -> C_SHA256
|
||||
| C_SHA512 -> C_SHA512
|
||||
| C_BLAKE2b -> C_BLAKE2b
|
||||
| C_HASH -> C_HASH
|
||||
| C_HASH_KEY -> C_HASH_KEY
|
||||
| C_CHECK_SIGNATURE -> C_CHECK_SIGNATURE
|
||||
| C_CHAIN_ID -> C_CHAIN_ID
|
||||
(* Blockchain *)
|
||||
| C_CALL -> C_CALL
|
||||
| C_CONTRACT -> C_CONTRACT
|
||||
| C_CONTRACT_OPT -> C_CONTRACT_OPT
|
||||
| C_CONTRACT_ENTRYPOINT -> C_CONTRACT_ENTRYPOINT
|
||||
| C_CONTRACT_ENTRYPOINT_OPT -> C_CONTRACT_ENTRYPOINT_OPT
|
||||
| C_AMOUNT -> C_AMOUNT
|
||||
| C_BALANCE -> C_BALANCE
|
||||
| C_SOURCE -> C_SOURCE
|
||||
| C_SENDER -> C_SENDER
|
||||
| C_ADDRESS -> C_ADDRESS
|
||||
| C_SELF -> C_SELF
|
||||
| C_SELF_ADDRESS -> C_SELF_ADDRESS
|
||||
| C_IMPLICIT_ACCOUNT -> C_IMPLICIT_ACCOUNT
|
||||
| C_SET_DELEGATE -> C_SET_DELEGATE
|
||||
| C_CREATE_CONTRACT -> C_CREATE_CONTRACT
|
||||
|
||||
let rec type_program (p:I.program) : (O.program * Solver.state) result =
|
||||
let aux (e, acc:(environment * O.declaration Location.wrap list)) (d:I.declaration Location.wrap) =
|
||||
let%bind ed' = (bind_map_location (type_declaration e (Solver.placeholder_for_state_of_new_typer ()))) d in
|
||||
@ -236,13 +486,13 @@ and type_declaration env (_placeholder_for_state_of_new_typer : Solver.state) :
|
||||
let%bind tv = evaluate_type env type_expression in
|
||||
let env' = Environment.add_type (type_name) tv env in
|
||||
ok (env', (Solver.placeholder_for_state_of_new_typer ()) , None)
|
||||
| Declaration_constant (name , tv_opt , inline, expression) -> (
|
||||
| Declaration_constant (binder , tv_opt , inline, expression) -> (
|
||||
let%bind tv'_opt = bind_map_option (evaluate_type env) tv_opt in
|
||||
let%bind ae' =
|
||||
trace (constant_declaration_error name expression tv'_opt) @@
|
||||
let%bind expr =
|
||||
trace (constant_declaration_error binder expression tv'_opt) @@
|
||||
type_expression' ?tv_opt:tv'_opt env expression in
|
||||
let env' = Environment.add_ez_ae name ae' env in
|
||||
ok (env', (Solver.placeholder_for_state_of_new_typer ()) , Some (O.Declaration_constant (name,ae', inline, env')))
|
||||
let post_env = Environment.add_ez_ae binder expr env in
|
||||
ok (post_env, (Solver.placeholder_for_state_of_new_typer ()) , Some (O.Declaration_constant { binder ; expr ; inline ; post_env}))
|
||||
)
|
||||
|
||||
and type_match : (environment -> I.expression -> O.expression result) -> environment -> O.type_expression -> I.matching_expr -> I.expression -> Location.t -> O.matching_expr result =
|
||||
@ -255,14 +505,14 @@ and type_match : (environment -> I.expression -> O.expression result) -> environ
|
||||
let%bind match_false = f e match_false in
|
||||
ok (O.Match_bool {match_true ; match_false})
|
||||
| Match_option {match_none ; match_some} ->
|
||||
let%bind t_opt =
|
||||
let%bind tv =
|
||||
trace_strong (match_error ~expected:i ~actual:t loc)
|
||||
@@ get_t_option t in
|
||||
let%bind match_none = f e match_none in
|
||||
let (n, b,_) = match_some in
|
||||
let e' = Environment.add_ez_binder n t_opt e in
|
||||
let%bind b' = f e' b in
|
||||
ok (O.Match_option {match_none ; match_some = (n, b', t_opt)})
|
||||
let (opt, b,_) = match_some in
|
||||
let e' = Environment.add_ez_binder opt tv e in
|
||||
let%bind body = f e' b in
|
||||
ok (O.Match_option {match_none ; match_some = {opt; body; tv}})
|
||||
| Match_list {match_nil ; match_cons} ->
|
||||
let%bind t_elt =
|
||||
trace_strong (match_error ~expected:i ~actual:t loc)
|
||||
@ -271,19 +521,19 @@ and type_match : (environment -> I.expression -> O.expression result) -> environ
|
||||
let (hd, tl, b,_) = match_cons in
|
||||
let e' = Environment.add_ez_binder hd t_elt e in
|
||||
let e' = Environment.add_ez_binder tl t e' in
|
||||
let%bind b' = f e' b in
|
||||
ok (O.Match_list {match_nil ; match_cons = (hd, tl, b', t_elt)})
|
||||
| Match_tuple ((lst, b),_) ->
|
||||
let%bind t_tuple =
|
||||
let%bind body = f e' b in
|
||||
ok (O.Match_list {match_nil ; match_cons = {hd; tl; body; tv=t_elt}})
|
||||
| Match_tuple ((vars, b),_) ->
|
||||
let%bind tvs =
|
||||
trace_strong (match_error ~expected:i ~actual:t loc)
|
||||
@@ get_t_tuple t in
|
||||
let%bind lst' =
|
||||
generic_try (match_tuple_wrong_arity t_tuple lst loc)
|
||||
@@ (fun () -> List.combine lst t_tuple) in
|
||||
let%bind vars' =
|
||||
generic_try (match_tuple_wrong_arity tvs vars loc)
|
||||
@@ (fun () -> List.combine vars tvs) in
|
||||
let aux prev (name, tv) = Environment.add_ez_binder name tv prev in
|
||||
let e' = List.fold_left aux e lst' in
|
||||
let%bind b' = f e' b in
|
||||
ok (O.Match_tuple ((lst, b'),t_tuple))
|
||||
let e' = List.fold_left aux e vars' in
|
||||
let%bind body = f e' b in
|
||||
ok (O.Match_tuple { vars ; body ; tvs})
|
||||
| Match_variant (lst,_) ->
|
||||
let%bind variant_opt =
|
||||
let aux acc ((constructor_name , _) , _) =
|
||||
@ -306,15 +556,15 @@ and type_match : (environment -> I.expression -> O.expression result) -> environ
|
||||
ok acc in
|
||||
trace (simple_info "in match variant") @@
|
||||
bind_fold_list aux None lst in
|
||||
let%bind variant =
|
||||
let%bind tv =
|
||||
trace_option (match_empty_variant i loc) @@
|
||||
variant_opt in
|
||||
let%bind () =
|
||||
let%bind variant_cases' =
|
||||
trace (match_error ~expected:i ~actual:t loc)
|
||||
@@ Ast_typed.Combinators.get_t_sum variant in
|
||||
let variant_cases = List.map fst @@ I.CMap.to_kv_list variant_cases' in
|
||||
let match_cases = List.map (Function.compose fst fst) lst in
|
||||
@@ Ast_typed.Combinators.get_t_sum tv in
|
||||
let variant_cases = List.map fst @@ O.CMap.to_kv_list variant_cases' in
|
||||
let match_cases = List.map (fun x -> convert_constructor' @@ fst @@ fst x) lst in
|
||||
let test_case = fun c ->
|
||||
Assert.assert_true (List.mem c match_cases)
|
||||
in
|
||||
@ -326,17 +576,18 @@ and type_match : (environment -> I.expression -> O.expression result) -> environ
|
||||
Assert.assert_true List.(length variant_cases = length match_cases) in
|
||||
ok ()
|
||||
in
|
||||
let%bind lst' =
|
||||
let aux ((constructor_name , name) , b) =
|
||||
let%bind cases =
|
||||
let aux ((constructor_name , pattern) , b) =
|
||||
let%bind (constructor , _) =
|
||||
trace_option (unbound_constructor e constructor_name loc) @@
|
||||
Environment.get_constructor constructor_name e in
|
||||
let e' = Environment.add_ez_binder name constructor e in
|
||||
let%bind b' = f e' b in
|
||||
ok ((constructor_name , name) , b')
|
||||
let e' = Environment.add_ez_binder pattern constructor e in
|
||||
let%bind body = f e' b in
|
||||
let constructor = convert_constructor' constructor_name in
|
||||
ok ({constructor ; pattern ; body} : O.matching_content_case)
|
||||
in
|
||||
bind_map_list aux lst in
|
||||
ok (O.Match_variant (lst' , variant))
|
||||
ok (O.Match_variant { cases ; tv })
|
||||
|
||||
and evaluate_type (e:environment) (t:I.type_expression) : O.type_expression result =
|
||||
let return tv' = ok (make_t tv' (Some t)) in
|
||||
@ -355,17 +606,17 @@ and evaluate_type (e:environment) (t:I.type_expression) : O.type_expression resu
|
||||
ok ()
|
||||
else fail (redundant_constructor e k)
|
||||
| None -> ok () in
|
||||
ok @@ I.CMap.add k v' prev'
|
||||
ok @@ O.CMap.add (convert_constructor' k) v' prev'
|
||||
in
|
||||
let%bind m = I.CMap.fold aux m (ok I.CMap.empty) in
|
||||
let%bind m = I.CMap.fold aux m (ok O.CMap.empty) in
|
||||
return (T_sum m)
|
||||
| T_record m ->
|
||||
let aux k v prev =
|
||||
let%bind prev' = prev in
|
||||
let%bind v' = evaluate_type e v in
|
||||
ok @@ I.LMap.add k v' prev'
|
||||
ok @@ O.LMap.add (convert_label k) v' prev'
|
||||
in
|
||||
let%bind m = I.LMap.fold aux m (ok I.LMap.empty) in
|
||||
let%bind m = I.LMap.fold aux m (ok O.LMap.empty) in
|
||||
return (T_record m)
|
||||
| T_variable name ->
|
||||
let%bind tv =
|
||||
@ -373,7 +624,7 @@ and evaluate_type (e:environment) (t:I.type_expression) : O.type_expression resu
|
||||
@@ Environment.get_type_opt (name) e in
|
||||
ok tv
|
||||
| T_constant cst ->
|
||||
return (T_constant cst)
|
||||
return (T_constant (convert_type_constant cst))
|
||||
| T_operator opt ->
|
||||
let%bind opt = match opt with
|
||||
| TC_set s ->
|
||||
@ -388,23 +639,23 @@ and evaluate_type (e:environment) (t:I.type_expression) : O.type_expression resu
|
||||
| TC_map (k,v) ->
|
||||
let%bind k = evaluate_type e k in
|
||||
let%bind v = evaluate_type e v in
|
||||
ok @@ O.TC_map (k,v)
|
||||
ok @@ O.TC_map {k;v}
|
||||
| TC_big_map (k,v) ->
|
||||
let%bind k = evaluate_type e k in
|
||||
let%bind v = evaluate_type e v in
|
||||
ok @@ O.TC_big_map (k,v)
|
||||
ok @@ O.TC_big_map {k;v}
|
||||
| TC_map_or_big_map (k,v) ->
|
||||
let%bind k = evaluate_type e k in
|
||||
let%bind v = evaluate_type e v in
|
||||
ok @@ O.TC_map_or_big_map (k,v)
|
||||
ok @@ O.TC_map_or_big_map {k;v}
|
||||
| TC_michelson_or (l,r) ->
|
||||
let%bind l = evaluate_type e l in
|
||||
let%bind r = evaluate_type e r in
|
||||
ok @@ O.TC_michelson_or (l,r)
|
||||
ok @@ O.TC_michelson_or {l;r}
|
||||
| TC_arrow ( arg , ret ) ->
|
||||
let%bind arg' = evaluate_type e arg in
|
||||
let%bind ret' = evaluate_type e ret in
|
||||
ok @@ O.TC_arrow ( arg' , ret' )
|
||||
ok @@ O.TC_arrow { type1=arg' ; type2=ret' }
|
||||
| TC_contract c ->
|
||||
let%bind c = evaluate_type e c in
|
||||
ok @@ O.TC_contract c
|
||||
@ -477,9 +728,9 @@ and type_expression' : environment -> ?tv_opt:O.type_expression -> I.expression
|
||||
let%bind r_tv = get_t_record prev.type_expression in
|
||||
let%bind tv =
|
||||
generic_try (bad_record_access property ae prev.type_expression ae.location)
|
||||
@@ (fun () -> I.LMap.find property r_tv) in
|
||||
@@ (fun () -> O.LMap.find (convert_label property) r_tv) in
|
||||
let location = ae.location in
|
||||
ok @@ make_e ~location (E_record_accessor {record=prev; path=property}) tv e
|
||||
ok @@ make_e ~location (E_record_accessor {record=prev; path=convert_label property}) tv e
|
||||
in
|
||||
let%bind ae =
|
||||
trace (simple_info "accessing") @@ aux e' path in
|
||||
@ -494,7 +745,7 @@ and type_expression' : environment -> ?tv_opt:O.type_expression -> I.expression
|
||||
let%bind expr' = type_expression' e element in
|
||||
( match t.type_content with
|
||||
| T_sum c ->
|
||||
let ct = I.CMap.find (I.Constructor s) c in
|
||||
let ct = O.CMap.find (O.Constructor s) c in
|
||||
let%bind _assert = O.assert_type_expression_eq (expr'.type_expression, ct) in
|
||||
return (E_constructor {constructor = Constructor s; element=expr'}) t
|
||||
| _ -> simple_fail "ll"
|
||||
@ -515,27 +766,28 @@ and type_expression' : environment -> ?tv_opt:O.type_expression -> I.expression
|
||||
Environment.get_constructor constructor e in
|
||||
let%bind expr' = type_expression' e element in
|
||||
let%bind _assert = O.assert_type_expression_eq (expr'.type_expression, c_tv) in
|
||||
let constructor = convert_constructor' constructor in
|
||||
return (E_constructor {constructor; element=expr'}) sum_tv
|
||||
(* Record *)
|
||||
| E_record m ->
|
||||
let aux prev k expr =
|
||||
let%bind expr' = type_expression' e expr in
|
||||
ok (I.LMap.add k expr' prev)
|
||||
ok (O.LMap.add (convert_label k) expr' prev)
|
||||
in
|
||||
let%bind m' = Stage_common.Helpers.bind_fold_lmap aux (ok I.LMap.empty) m in
|
||||
return (E_record m') (t_record (I.LMap.map get_type_expression m') ())
|
||||
let%bind m' = Stage_common.Helpers.bind_fold_lmap aux (ok O.LMap.empty) m in
|
||||
return (E_record m') (t_record (O.LMap.map get_type_expression m') ())
|
||||
| E_record_update {record; path; update} ->
|
||||
|
||||
let path = convert_label path in
|
||||
let%bind record = type_expression' e record in
|
||||
let%bind update = type_expression' e update in
|
||||
let wrapped = get_type_expression record in
|
||||
let%bind tv =
|
||||
match wrapped.type_content with
|
||||
let%bind tv =
|
||||
match wrapped.type_content with
|
||||
| T_record record -> (
|
||||
let field_op = I.LMap.find_opt path record in
|
||||
let field_op = O.LMap.find_opt path record in
|
||||
match field_op with
|
||||
| Some tv -> ok (tv)
|
||||
| None -> failwith @@ Format.asprintf "field %a is not part of record %a" Stage_common.PP.label path O.PP.type_expression wrapped
|
||||
| None -> failwith @@ Format.asprintf "field %a is not part of record %a" Ast_typed.PP.label path O.PP.type_expression wrapped
|
||||
)
|
||||
| _ -> failwith "Update an expression which is not a record"
|
||||
in
|
||||
@ -558,16 +810,15 @@ and type_expression' : environment -> ?tv_opt:O.type_expression -> I.expression
|
||||
(* this special case is here force annotation of the untyped lambda
|
||||
generated by pascaligo's for_collect loop *)
|
||||
let%bind (v_col , v_initr ) = bind_map_pair (type_expression' e) (collect , init_record ) in
|
||||
let tv_col = get_type_expression v_col in (* this is the type of the collection *)
|
||||
let tv_col = get_type_expression v_col in (* this is the type of the collection *)
|
||||
let tv_out = get_type_expression v_initr in (* this is the output type of the lambda*)
|
||||
let%bind input_type = match tv_col.type_content with
|
||||
| O.T_operator ( TC_list t | TC_set t) -> ok @@ make_t_ez_record (("0",tv_out)::[("1",t)])
|
||||
| O.T_operator ( TC_map (k,v)| TC_big_map (k,v)) -> ok @@ make_t_ez_record (("0",tv_out)::[("1",make_t_ez_record [("0",k);("1",v)])])
|
||||
| O.T_operator ( TC_map {k;v}| TC_big_map {k;v}) -> ok @@ make_t_ez_record (("0",tv_out)::[("1",make_t_ez_record [("0",k);("1",v)])])
|
||||
| _ ->
|
||||
let wtype = Format.asprintf
|
||||
"Loops over collections expect lists, sets or maps, got type %a" O.PP.type_expression tv_col in
|
||||
fail @@ simple_error wtype in
|
||||
let lname = lname in
|
||||
let e' = Environment.add_ez_binder lname input_type e in
|
||||
let%bind body = type_expression' ?tv_opt:(Some tv_out) e' result in
|
||||
let output_type = body.type_expression in
|
||||
@ -669,10 +920,10 @@ and type_expression' : environment -> ?tv_opt:O.type_expression -> I.expression
|
||||
let aux (cur:O.matching_expr) =
|
||||
match cur with
|
||||
| Match_bool { match_true ; match_false } -> [ match_true ; match_false ]
|
||||
| Match_list { match_nil ; match_cons = (_ , _ , match_cons, _) } -> [ match_nil ; match_cons ]
|
||||
| Match_option { match_none ; match_some = (_ , match_some, _) } -> [ match_none ; match_some ]
|
||||
| Match_tuple ((_ , match_tuple), _) -> [ match_tuple ]
|
||||
| Match_variant (lst , _) -> List.map snd lst in
|
||||
| Match_list { match_nil ; match_cons = {hd=_ ; tl=_ ; body ; tv=_} } -> [ match_nil ; body ]
|
||||
| Match_option { match_none ; match_some = {opt=_ ; body ; tv=_ } } -> [ match_none ; body ]
|
||||
| Match_tuple {vars=_;body;tvs=_} -> [ body ]
|
||||
| Match_variant {cases; tv=_} -> List.map (fun (c : O.matching_content_case) -> c.body) cases in
|
||||
List.map get_type_expression @@ aux m' in
|
||||
let aux prec cur =
|
||||
let%bind () =
|
||||
@ -751,6 +1002,7 @@ and type_lambda e {
|
||||
|
||||
|
||||
and type_constant (name:I.constant') (lst:O.type_expression list) (tv_opt:O.type_expression option) : (O.constant' * O.type_expression) result =
|
||||
let name = convert_constant' name in
|
||||
let%bind typer = Operators.Typer.constant_typers name in
|
||||
let%bind tv = typer lst tv_opt in
|
||||
ok(name, tv)
|
||||
@ -791,7 +1043,7 @@ let rec untype_expression (e:O.expression) : (I.expression) result =
|
||||
return (e_literal l)
|
||||
| E_constant {cons_name;arguments} ->
|
||||
let%bind lst' = bind_map_list untype_expression arguments in
|
||||
return (e_constant cons_name lst')
|
||||
return (e_constant (unconvert_constant' cons_name) lst')
|
||||
| E_variable n ->
|
||||
return (e_variable (n))
|
||||
| E_application {lamb;args} ->
|
||||
@ -809,17 +1061,17 @@ let rec untype_expression (e:O.expression) : (I.expression) result =
|
||||
let Constructor n = constructor in
|
||||
return (e_constructor n p')
|
||||
| E_record r ->
|
||||
let r = LMap.to_kv_list r in
|
||||
let%bind r' = bind_map_list (fun (k,e) -> let%bind e = untype_expression e in ok (k,e)) r in
|
||||
let r = O.LMap.to_kv_list r in
|
||||
let%bind r' = bind_map_list (fun (O.Label k,e) -> let%bind e = untype_expression e in ok (I.Label k,e)) r in
|
||||
return (e_record @@ LMap.of_list r')
|
||||
| E_record_accessor {record; path} ->
|
||||
let%bind r' = untype_expression record in
|
||||
let Label s = path in
|
||||
return (e_record_accessor r' s)
|
||||
| E_record_update {record=r; path=l; update=e} ->
|
||||
| E_record_update {record=r; path=O.Label l; update=e} ->
|
||||
let%bind r' = untype_expression r in
|
||||
let%bind e = untype_expression e in
|
||||
return (e_record_update r' l e)
|
||||
return (e_record_update r' (I.Label l) e)
|
||||
| E_matching {matchee;cases} ->
|
||||
let%bind ae' = untype_expression matchee in
|
||||
let%bind m' = untype_matching untype_expression cases in
|
||||
@ -842,22 +1094,22 @@ and untype_matching : (O.expression -> I.expression result) -> O.matching_expr -
|
||||
let%bind match_true = f match_true in
|
||||
let%bind match_false = f match_false in
|
||||
ok @@ Match_bool {match_true ; match_false}
|
||||
| Match_tuple ((lst, b),_) ->
|
||||
let%bind b = f b in
|
||||
ok @@ I.Match_tuple ((lst, b),[])
|
||||
| Match_option {match_none ; match_some = (v, some,_)} ->
|
||||
| Match_tuple {vars; body;tvs=_} ->
|
||||
let%bind b = f body in
|
||||
ok @@ I.Match_tuple ((vars, b),[])
|
||||
| Match_option {match_none ; match_some = {opt; body ; tv=_}} ->
|
||||
let%bind match_none = f match_none in
|
||||
let%bind some = f some in
|
||||
let match_some = v, some, () in
|
||||
let%bind some = f body in
|
||||
let match_some = opt, some, () in
|
||||
ok @@ Match_option {match_none ; match_some}
|
||||
| Match_list {match_nil ; match_cons = (hd_name, tl_name, cons,_)} ->
|
||||
| Match_list {match_nil ; match_cons = {hd ; tl ; body ; tv=_}} ->
|
||||
let%bind match_nil = f match_nil in
|
||||
let%bind cons = f cons in
|
||||
let match_cons = hd_name , tl_name , cons, () in
|
||||
let%bind cons = f body in
|
||||
let match_cons = hd , tl , cons, () in
|
||||
ok @@ Match_list {match_nil ; match_cons}
|
||||
| Match_variant (lst , _) ->
|
||||
let aux ((a,b),c) =
|
||||
let%bind c' = f c in
|
||||
ok ((a,b),c') in
|
||||
let%bind lst' = bind_map_list aux lst in
|
||||
| Match_variant {cases;tv=_} ->
|
||||
let aux ({constructor;pattern;body} : O.matching_content_case) =
|
||||
let%bind c' = f body in
|
||||
ok ((unconvert_constructor' constructor,pattern),c') in
|
||||
let%bind lst' = bind_map_list aux cases in
|
||||
ok @@ Match_variant (lst',())
|
||||
|
@ -1,4 +1,4 @@
open Ast_typed
open Ast_typed.Types
open Trace

type contract_pass_data = {
@ -63,7 +63,7 @@ let self_typing : contract_pass_data -> expression -> (bool * contract_pass_data
| _ -> fail @@ Errors.entrypoint_annotation_not_literal entrypoint_exp.location in
let%bind entrypoint_t = match dat.contract_type.parameter.type_content with
| T_sum cmap -> trace_option (Errors.unmatched_entrypoint entrypoint_exp.location)
@@ Stage_common.Types.CMap.find_opt (Constructor entrypoint) cmap
@@ CMap.find_opt (Constructor entrypoint) cmap
| t -> ok {dat.contract_type.parameter with type_content = t} in
let%bind () =
trace_strong (bad_self_err ()) @@

@ -1,9 +1,9 @@
open Ast_typed
open Trace
open Stage_common.Helpers
open Ast_typed.Helpers

type 'a folder = 'a -> expression -> 'a result
let rec fold_expression : 'a folder -> 'a -> expression -> 'a result = fun f init e ->
let rec fold_expression : 'a . 'a folder -> 'a -> expression -> 'a result = fun f init e ->
let self = fold_expression f in
let%bind init' = f init e in
match e.expression_content with
@ -51,32 +51,32 @@ let rec fold_expression : 'a folder -> 'a -> expression -> 'a result = fun f ini
|
||||
ok res
|
||||
)
|
||||
|
||||
and fold_cases : 'a folder -> 'a -> matching_expr -> 'a result = fun f init m ->
|
||||
and fold_cases : 'a . 'a folder -> 'a -> matching_expr -> 'a result = fun f init m ->
|
||||
match m with
|
||||
| Match_bool { match_true ; match_false } -> (
|
||||
let%bind res = fold_expression f init match_true in
|
||||
let%bind res = fold_expression f res match_false in
|
||||
ok res
|
||||
)
|
||||
| Match_list { match_nil ; match_cons = (_ , _ , cons, _) } -> (
|
||||
| Match_list { match_nil ; match_cons = {hd=_; tl=_ ; body; tv=_} } -> (
|
||||
let%bind res = fold_expression f init match_nil in
|
||||
let%bind res = fold_expression f res cons in
|
||||
let%bind res = fold_expression f res body in
|
||||
ok res
|
||||
)
|
||||
| Match_option { match_none ; match_some = (_ , some, _) } -> (
|
||||
| Match_option { match_none ; match_some = {opt=_; body; tv=_} } -> (
|
||||
let%bind res = fold_expression f init match_none in
|
||||
let%bind res = fold_expression f res some in
|
||||
let%bind res = fold_expression f res body in
|
||||
ok res
|
||||
)
|
||||
| Match_tuple ((_ , e), _) -> (
|
||||
let%bind res = fold_expression f init e in
|
||||
| Match_tuple {vars=_ ; body; tvs=_} -> (
|
||||
let%bind res = fold_expression f init body in
|
||||
ok res
|
||||
)
|
||||
| Match_variant (lst, _) -> (
|
||||
let aux init' ((_ , _) , e) =
|
||||
let%bind res' = fold_expression f init' e in
|
||||
| Match_variant {cases;tv=_} -> (
|
||||
let aux init' {constructor=_; pattern=_ ; body} =
|
||||
let%bind res' = fold_expression f init' body in
|
||||
ok res' in
|
||||
let%bind res = bind_fold_list aux init lst in
|
||||
let%bind res = bind_fold_list aux init cases in
|
||||
ok res
|
||||
)
|
||||
|
||||
@ -139,42 +139,42 @@ and map_cases : mapper -> matching_expr -> matching_expr result = fun f m ->
|
||||
let%bind match_true = map_expression f match_true in
|
||||
let%bind match_false = map_expression f match_false in
|
||||
ok @@ Match_bool { match_true ; match_false }
|
||||
)
|
||||
| Match_list { match_nil ; match_cons = (hd , tl , cons, te) } -> (
|
||||
)
|
||||
| Match_list { match_nil ; match_cons = {hd ; tl ; body ; tv} } -> (
|
||||
let%bind match_nil = map_expression f match_nil in
|
||||
let%bind cons = map_expression f cons in
|
||||
ok @@ Match_list { match_nil ; match_cons = (hd , tl , cons, te) }
|
||||
let%bind body = map_expression f body in
|
||||
ok @@ Match_list { match_nil ; match_cons = {hd ; tl ; body; tv} }
|
||||
)
|
||||
| Match_option { match_none ; match_some = (name , some, te) } -> (
|
||||
| Match_option { match_none ; match_some = {opt ; body ; tv } } -> (
|
||||
let%bind match_none = map_expression f match_none in
|
||||
let%bind some = map_expression f some in
|
||||
ok @@ Match_option { match_none ; match_some = (name , some, te) }
|
||||
let%bind body = map_expression f body in
|
||||
ok @@ Match_option { match_none ; match_some = { opt ; body ; tv } }
|
||||
)
|
||||
| Match_tuple ((names , e), te) -> (
|
||||
let%bind e' = map_expression f e in
|
||||
ok @@ Match_tuple ((names , e'), te)
|
||||
| Match_tuple { vars ; body ; tvs } -> (
|
||||
let%bind body = map_expression f body in
|
||||
ok @@ Match_tuple { vars ; body ; tvs }
|
||||
)
|
||||
| Match_variant (lst, te) -> (
|
||||
let aux ((a , b) , e) =
|
||||
let%bind e' = map_expression f e in
|
||||
ok ((a , b) , e')
|
||||
| Match_variant {cases;tv} -> (
|
||||
let aux { constructor ; pattern ; body } =
|
||||
let%bind body = map_expression f body in
|
||||
ok {constructor;pattern;body}
|
||||
in
|
||||
let%bind lst' = bind_map_list aux lst in
|
||||
ok @@ Match_variant (lst', te)
|
||||
let%bind cases = bind_map_list aux cases in
|
||||
ok @@ Match_variant {cases ; tv}
|
||||
)
|
||||
|
||||
and map_program : mapper -> program -> program result = fun m p ->
|
||||
let aux = fun (x : declaration) ->
|
||||
match x with
|
||||
| Declaration_constant (n , e , i, env) -> (
|
||||
let%bind e' = map_expression m e in
|
||||
ok (Declaration_constant (n , e' , i, env))
|
||||
| Declaration_constant {binder; expr ; inline ; post_env} -> (
|
||||
let%bind expr = map_expression m expr in
|
||||
ok (Declaration_constant {binder; expr ; inline ; post_env})
|
||||
)
|
||||
in
|
||||
bind_map_list (bind_map_location aux) p
|
||||
|
||||
type 'a fold_mapper = 'a -> expression -> (bool * 'a * expression) result
|
||||
let rec fold_map_expression : 'a fold_mapper -> 'a -> expression -> ('a * expression) result = fun f a e ->
|
||||
let rec fold_map_expression : 'a . 'a fold_mapper -> 'a -> expression -> ('a * expression) result = fun f a e ->
|
||||
let self = fold_map_expression f in
|
||||
let%bind (continue, init',e') = f a e in
|
||||
if (not continue) then ok(init',e')
|
||||
@ -228,42 +228,42 @@ let rec fold_map_expression : 'a fold_mapper -> 'a -> expression -> ('a * expres
|
||||
)
|
||||
| E_literal _ | E_variable _ as e' -> ok (init', return e')
|
||||
|
||||
and fold_map_cases : 'a fold_mapper -> 'a -> matching_expr -> ('a * matching_expr) result = fun f init m ->
|
||||
and fold_map_cases : 'a . 'a fold_mapper -> 'a -> matching_expr -> ('a * matching_expr) result = fun f init m ->
|
||||
match m with
|
||||
| Match_bool { match_true ; match_false } -> (
|
||||
let%bind (init, match_true) = fold_map_expression f init match_true in
|
||||
let%bind (init, match_false) = fold_map_expression f init match_false in
|
||||
ok @@ (init, Match_bool { match_true ; match_false })
|
||||
)
|
||||
| Match_list { match_nil ; match_cons = (hd , tl , cons, te) } -> (
|
||||
| Match_list { match_nil ; match_cons = { hd ; tl ; body ; tv } } -> (
|
||||
let%bind (init, match_nil) = fold_map_expression f init match_nil in
|
||||
let%bind (init, cons) = fold_map_expression f init cons in
|
||||
ok @@ (init, Match_list { match_nil ; match_cons = (hd , tl , cons, te) })
|
||||
let%bind (init, body) = fold_map_expression f init body in
|
||||
ok @@ (init, Match_list { match_nil ; match_cons = { hd ; tl ; body ; tv } })
|
||||
)
|
||||
| Match_option { match_none ; match_some = (name , some, te) } -> (
|
||||
| Match_option { match_none ; match_some = { opt ; body ; tv } } -> (
|
||||
let%bind (init, match_none) = fold_map_expression f init match_none in
|
||||
let%bind (init, some) = fold_map_expression f init some in
|
||||
ok @@ (init, Match_option { match_none ; match_some = (name , some, te) })
|
||||
let%bind (init, body) = fold_map_expression f init body in
|
||||
ok @@ (init, Match_option { match_none ; match_some = { opt ; body ; tv } })
|
||||
)
|
||||
| Match_tuple ((names , e), te) -> (
|
||||
let%bind (init, e') = fold_map_expression f init e in
|
||||
ok @@ (init, Match_tuple ((names , e'), te))
|
||||
| Match_tuple { vars ; body ; tvs } -> (
|
||||
let%bind (init, body) = fold_map_expression f init body in
|
||||
ok @@ (init, Match_tuple {vars ; body ; tvs })
|
||||
)
|
||||
| Match_variant (lst, te) -> (
|
||||
let aux init ((a , b) , e) =
|
||||
let%bind (init,e') = fold_map_expression f init e in
|
||||
ok (init, ((a , b) , e'))
|
||||
| Match_variant {cases ; tv} -> (
|
||||
let aux init {constructor ; pattern ; body} =
|
||||
let%bind (init, body) = fold_map_expression f init body in
|
||||
ok (init, {constructor; pattern ; body})
|
||||
in
|
||||
let%bind (init,lst') = bind_fold_map_list aux init lst in
|
||||
ok @@ (init, Match_variant (lst', te))
|
||||
)
|
||||
let%bind (init,cases) = bind_fold_map_list aux init cases in
|
||||
ok @@ (init, Match_variant {cases ; tv})
|
||||
)
|
||||
|
||||
and fold_map_program : 'a fold_mapper -> 'a -> program -> ('a * program) result = fun m init p ->
|
||||
and fold_map_program : 'a . 'a fold_mapper -> 'a -> program -> ('a * program) result = fun m init p ->
|
||||
let aux = fun (acc,acc_prg) (x : declaration Location.wrap) ->
|
||||
match Location.unwrap x with
|
||||
| Declaration_constant (v , e , i, env) -> (
|
||||
let%bind (acc',e') = fold_map_expression m acc e in
|
||||
let wrap_content = Declaration_constant (v , e' , i, env) in
|
||||
| Declaration_constant {binder ; expr ; inline ; post_env} -> (
|
||||
let%bind (acc', expr) = fold_map_expression m acc expr in
|
||||
let wrap_content = Declaration_constant {binder ; expr ; inline ; post_env} in
|
||||
ok (acc', List.append acc_prg [{x with wrap_content}])
|
||||
)
|
||||
in
|
||||
@ -315,28 +315,28 @@ type contract_type = {
|
||||
let fetch_contract_type : string -> program -> contract_type result = fun main_fname program ->
|
||||
let main_decl = List.rev @@ List.filter
|
||||
(fun declt ->
|
||||
let (Declaration_constant (v , _ , _ , _)) = Location.unwrap declt in
|
||||
String.equal (Var.to_name v) main_fname
|
||||
let (Declaration_constant { binder ; expr=_ ; inline=_ ; post_env=_ }) = Location.unwrap declt in
|
||||
String.equal (Var.to_name binder) main_fname
|
||||
)
|
||||
program
|
||||
in
|
||||
match main_decl with
|
||||
| (hd::_) -> (
|
||||
let (Declaration_constant (_,e,_,_)) = Location.unwrap hd in
|
||||
match e.type_expression.type_content with
|
||||
let (Declaration_constant { binder=_ ; expr ; inline=_ ; post_env=_ }) = Location.unwrap hd in
|
||||
match expr.type_expression.type_content with
|
||||
| T_arrow {type1 ; type2} -> (
|
||||
match type1.type_content , type2.type_content with
|
||||
| T_record tin , T_record tout when (is_tuple_lmap tin) && (is_tuple_lmap tout) ->
|
||||
let%bind (parameter,storage) = Stage_common.Helpers.get_pair tin in
|
||||
let%bind (listop,storage') = Stage_common.Helpers.get_pair tout in
|
||||
let%bind () = trace_strong (Errors.expected_list_operation main_fname listop e) @@
|
||||
let%bind (parameter,storage) = Ast_typed.Helpers.get_pair tin in
|
||||
let%bind (listop,storage') = Ast_typed.Helpers.get_pair tout in
|
||||
let%bind () = trace_strong (Errors.expected_list_operation main_fname listop expr) @@
|
||||
Ast_typed.assert_t_list_operation listop in
|
||||
let%bind () = trace_strong (Errors.expected_same main_fname storage storage' e) @@
|
||||
let%bind () = trace_strong (Errors.expected_same main_fname storage storage' expr) @@
|
||||
Ast_typed.assert_type_expression_eq (storage,storage') in
|
||||
(* TODO: on storage/parameter : assert_storable, assert_passable ? *)
|
||||
ok { parameter ; storage }
|
||||
| _ -> fail @@ Errors.bad_contract_io main_fname e
|
||||
| _ -> fail @@ Errors.bad_contract_io main_fname expr
|
||||
)
|
||||
| _ -> fail @@ Errors.bad_contract_io main_fname e
|
||||
| _ -> fail @@ Errors.bad_contract_io main_fname expr
|
||||
)
|
||||
| [] -> simple_fail ("Entrypoint '"^main_fname^"' does not exist")
|
||||
|
@ -15,15 +15,15 @@ end

let rec check_no_nested_bigmap is_in_bigmap e =
match e.type_content with
| T_operator (TC_big_map (_, _)) when is_in_bigmap ->
| T_operator (TC_big_map _) when is_in_bigmap ->
fail @@ Errors.no_nested_bigmap
| T_operator (TC_big_map (key, value)) ->
let%bind _ = check_no_nested_bigmap false key in
let%bind _ = check_no_nested_bigmap true value in
| T_operator (TC_big_map {k ; v}) ->
let%bind _ = check_no_nested_bigmap false k in
let%bind _ = check_no_nested_bigmap true v in
ok ()
| T_operator (TC_map_or_big_map (key, value)) ->
let%bind _ = check_no_nested_bigmap false key in
let%bind _ = check_no_nested_bigmap true value in
| T_operator (TC_map_or_big_map {k ; v}) ->
let%bind _ = check_no_nested_bigmap false k in
let%bind _ = check_no_nested_bigmap true v in
ok ()
| T_operator (TC_contract t)
| T_operator (TC_option t)
@ -31,17 +31,17 @@ let rec check_no_nested_bigmap is_in_bigmap e =
| T_operator (TC_set t) ->
let%bind _ = check_no_nested_bigmap is_in_bigmap t in
ok ()
| T_operator (TC_map (a, b)) ->
let%bind _ = check_no_nested_bigmap is_in_bigmap a in
let%bind _ = check_no_nested_bigmap is_in_bigmap b in
| T_operator (TC_map { k ; v }) ->
let%bind _ = check_no_nested_bigmap is_in_bigmap k in
let%bind _ = check_no_nested_bigmap is_in_bigmap v in
ok ()
| T_operator (TC_arrow (a, b)) ->
let%bind _ = check_no_nested_bigmap false a in
let%bind _ = check_no_nested_bigmap false b in
| T_operator (TC_arrow { type1 ; type2 }) ->
let%bind _ = check_no_nested_bigmap false type1 in
let%bind _ = check_no_nested_bigmap false type2 in
ok ()
| T_operator (TC_michelson_or (a, b)) ->
let%bind _ = check_no_nested_bigmap false a in
let%bind _ = check_no_nested_bigmap false b in
| T_operator (TC_michelson_or {l; r}) ->
let%bind _ = check_no_nested_bigmap false l in
let%bind _ = check_no_nested_bigmap false r in
ok ()
| T_sum s ->
let es = CMap.to_list s in

@ -63,23 +63,23 @@ and check_recursive_call_in_matching = fun n final_path c ->
let%bind _ = check_recursive_call n final_path match_true in
let%bind _ = check_recursive_call n final_path match_false in
ok ()
| Match_list {match_nil;match_cons=(_,_,e,_)} ->
| Match_list {match_nil;match_cons={hd=_;tl=_;body;tv=_}} ->
let%bind _ = check_recursive_call n final_path match_nil in
let%bind _ = check_recursive_call n final_path e in
let%bind _ = check_recursive_call n final_path body in
ok ()
| Match_option {match_none; match_some=(_,e,_)} ->
| Match_option {match_none; match_some={opt=_;body;tv=_}} ->
let%bind _ = check_recursive_call n final_path match_none in
let%bind _ = check_recursive_call n final_path e in
let%bind _ = check_recursive_call n final_path body in
ok ()
| Match_tuple ((_,e),_) ->
let%bind _ = check_recursive_call n final_path e in
| Match_tuple {vars=_;body;tvs=_} ->
let%bind _ = check_recursive_call n final_path body in
ok ()
| Match_variant (l,_) ->
let aux (_,e) =
let%bind _ = check_recursive_call n final_path e in
| Match_variant {cases;tv=_} ->
let aux {constructor=_; pattern=_; body} =
let%bind _ = check_recursive_call n final_path body in
ok ()
in
let%bind _ = bind_map_list aux l in
let%bind _ = bind_map_list aux cases in
ok ()

@ -595,7 +595,7 @@ module Typer = struct
| C_SELF_ADDRESS -> ok @@ t_self_address;
| C_IMPLICIT_ACCOUNT -> ok @@ t_implicit_account;
| C_SET_DELEGATE -> ok @@ t_set_delegate ;
| c -> simple_fail @@ Format.asprintf "Typer not implemented for consant %a" Stage_common.PP.constant c
| c -> simple_fail @@ Format.asprintf "Typer not implemented for consant %a" Ast_typed.PP.constant c
end

let none = typer_0 "NONE" @@ fun tv_opt ->

@ -1,10 +1,245 @@
|
||||
[@@@coverage exclude_file]
|
||||
(* open Types
|
||||
* open Format
|
||||
* open PP_helpers *)
|
||||
|
||||
(* include Stage_common.PP *)
|
||||
open Types
|
||||
open Format
|
||||
open PP_helpers
|
||||
|
||||
include Stage_common.PP
|
||||
include Ast_PP_type(Ast_typed_type_parameter)
|
||||
let constructor ppf (c:constructor') : unit =
|
||||
let Constructor c = c in fprintf ppf "%s" c
|
||||
|
||||
let label ppf (l:label) : unit =
|
||||
let Label l = l in fprintf ppf "%s" l
|
||||
|
||||
let cmap_sep value sep ppf m =
|
||||
let lst = CMap.to_kv_list m in
|
||||
let lst = List.sort (fun (Constructor a,_) (Constructor b,_) -> String.compare a b) lst in
|
||||
let new_pp ppf (k, v) = fprintf ppf "@[<h>%a -> %a@]" constructor k value v in
|
||||
fprintf ppf "%a" (list_sep new_pp sep) lst
|
||||
|
||||
let record_sep value sep ppf (m : 'a label_map) =
|
||||
let lst = LMap.to_kv_list m in
|
||||
let lst = List.sort_uniq (fun (Label a,_) (Label b,_) -> String.compare a b) lst in
|
||||
let new_pp ppf (k, v) = fprintf ppf "@[<h>%a -> %a@]" label k value v in
|
||||
fprintf ppf "%a" (list_sep new_pp sep) lst
|
||||
|
||||
let tuple_sep value sep ppf m =
|
||||
assert (Helpers.is_tuple_lmap m);
|
||||
let lst = Helpers.tuple_of_record m in
|
||||
let new_pp ppf (_, v) = fprintf ppf "%a" value v in
|
||||
fprintf ppf "%a" (list_sep new_pp sep) lst
|
||||
|
||||
(* Prints records which only contain the consecutive fields
|
||||
0..(cardinal-1) as tuples *)
|
||||
let tuple_or_record_sep value format_record sep_record format_tuple sep_tuple ppf m =
|
||||
if Helpers.is_tuple_lmap m then
|
||||
fprintf ppf format_tuple (tuple_sep value (tag sep_tuple)) m
|
||||
else
|
||||
fprintf ppf format_record (record_sep value (tag sep_record)) m
|
||||
|
||||
let list_sep_d x = list_sep x (tag " ,@ ")
|
||||
let cmap_sep_d x = cmap_sep x (tag " ,@ ")
|
||||
let tuple_or_record_sep_expr value = tuple_or_record_sep value "@[<hv 7>record[%a]@]" " ,@ " "@[<hv 2>( %a )@]" " ,@ "
|
||||
let tuple_or_record_sep_type value = tuple_or_record_sep value "@[<hv 7>record[%a]@]" " ,@ " "@[<hv 2>( %a )@]" " *@ "
|
||||
|
||||
let constant ppf : constant' -> unit = function
|
||||
| C_INT -> fprintf ppf "INT"
|
||||
| C_UNIT -> fprintf ppf "UNIT"
|
||||
| C_NIL -> fprintf ppf "NIL"
|
||||
| C_NOW -> fprintf ppf "NOW"
|
||||
| C_IS_NAT -> fprintf ppf "IS_NAT"
|
||||
| C_SOME -> fprintf ppf "SOME"
|
||||
| C_NONE -> fprintf ppf "NONE"
|
||||
| C_ASSERTION -> fprintf ppf "ASSERTION"
|
||||
| C_ASSERT_INFERRED -> fprintf ppf "ASSERT_INFERRED"
|
||||
| C_FAILWITH -> fprintf ppf "FAILWITH"
|
||||
| C_UPDATE -> fprintf ppf "UPDATE"
|
||||
(* Loops *)
|
||||
| C_ITER -> fprintf ppf "ITER"
|
||||
| C_FOLD -> fprintf ppf "FOLD"
|
||||
| C_FOLD_WHILE -> fprintf ppf "FOLD_WHILE"
|
||||
| C_FOLD_CONTINUE -> fprintf ppf "CONTINUE"
|
||||
| C_FOLD_STOP -> fprintf ppf "STOP"
|
||||
| C_LOOP_LEFT -> fprintf ppf "LOOP_LEFT"
|
||||
| C_LOOP_CONTINUE -> fprintf ppf "LOOP_CONTINUE"
|
||||
| C_LOOP_STOP -> fprintf ppf "LOOP_STOP"
|
||||
(* MATH *)
|
||||
| C_NEG -> fprintf ppf "NEG"
|
||||
| C_ABS -> fprintf ppf "ABS"
|
||||
| C_ADD -> fprintf ppf "ADD"
|
||||
| C_SUB -> fprintf ppf "SUB"
|
||||
| C_MUL -> fprintf ppf "MUL"
| C_EDIV -> fprintf ppf "EDIV"
| C_DIV -> fprintf ppf "DIV"
| C_MOD -> fprintf ppf "MOD"
(* LOGIC *)
| C_NOT -> fprintf ppf "NOT"
| C_AND -> fprintf ppf "AND"
| C_OR -> fprintf ppf "OR"
| C_XOR -> fprintf ppf "XOR"
| C_LSL -> fprintf ppf "LSL"
| C_LSR -> fprintf ppf "LSR"
(* COMPARATOR *)
| C_EQ -> fprintf ppf "EQ"
| C_NEQ -> fprintf ppf "NEQ"
| C_LT -> fprintf ppf "LT"
| C_GT -> fprintf ppf "GT"
| C_LE -> fprintf ppf "LE"
| C_GE -> fprintf ppf "GE"
(* Bytes/ String *)
| C_SIZE -> fprintf ppf "SIZE"
| C_CONCAT -> fprintf ppf "CONCAT"
| C_SLICE -> fprintf ppf "SLICE"
| C_BYTES_PACK -> fprintf ppf "BYTES_PACK"
| C_BYTES_UNPACK -> fprintf ppf "BYTES_UNPACK"
| C_CONS -> fprintf ppf "CONS"
(* Pair *)
| C_PAIR -> fprintf ppf "PAIR"
| C_CAR -> fprintf ppf "CAR"
| C_CDR -> fprintf ppf "CDR"
| C_LEFT -> fprintf ppf "LEFT"
| C_RIGHT -> fprintf ppf "RIGHT"
(* Set *)
| C_SET_EMPTY -> fprintf ppf "SET_EMPTY"
| C_SET_LITERAL -> fprintf ppf "SET_LITERAL"
| C_SET_ADD -> fprintf ppf "SET_ADD"
| C_SET_REMOVE -> fprintf ppf "SET_REMOVE"
| C_SET_ITER -> fprintf ppf "SET_ITER"
| C_SET_FOLD -> fprintf ppf "SET_FOLD"
| C_SET_MEM -> fprintf ppf "SET_MEM"
(* List *)
| C_LIST_EMPTY -> fprintf ppf "LIST_EMPTY"
| C_LIST_LITERAL -> fprintf ppf "LIST_LITERAL"
| C_LIST_ITER -> fprintf ppf "LIST_ITER"
| C_LIST_MAP -> fprintf ppf "LIST_MAP"
| C_LIST_FOLD -> fprintf ppf "LIST_FOLD"
(* Maps *)
| C_MAP -> fprintf ppf "MAP"
| C_MAP_EMPTY -> fprintf ppf "MAP_EMPTY"
| C_MAP_LITERAL -> fprintf ppf "MAP_LITERAL"
| C_MAP_GET -> fprintf ppf "MAP_GET"
| C_MAP_GET_FORCE -> fprintf ppf "MAP_GET_FORCE"
| C_MAP_ADD -> fprintf ppf "MAP_ADD"
| C_MAP_REMOVE -> fprintf ppf "MAP_REMOVE"
| C_MAP_UPDATE -> fprintf ppf "MAP_UPDATE"
| C_MAP_ITER -> fprintf ppf "MAP_ITER"
| C_MAP_MAP -> fprintf ppf "MAP_MAP"
| C_MAP_FOLD -> fprintf ppf "MAP_FOLD"
| C_MAP_MEM -> fprintf ppf "MAP_MEM"
| C_MAP_FIND -> fprintf ppf "MAP_FIND"
| C_MAP_FIND_OPT -> fprintf ppf "MAP_FIND_OPT"
(* Big Maps *)
| C_BIG_MAP -> fprintf ppf "BIG_MAP"
| C_BIG_MAP_EMPTY -> fprintf ppf "BIG_MAP_EMPTY"
| C_BIG_MAP_LITERAL -> fprintf ppf "BIG_MAP_LITERAL"
(* Crypto *)
| C_SHA256 -> fprintf ppf "SHA256"
| C_SHA512 -> fprintf ppf "SHA512"
| C_BLAKE2b -> fprintf ppf "BLAKE2b"
| C_HASH -> fprintf ppf "HASH"
| C_HASH_KEY -> fprintf ppf "HASH_KEY"
| C_CHECK_SIGNATURE -> fprintf ppf "CHECK_SIGNATURE"
| C_CHAIN_ID -> fprintf ppf "CHAIN_ID"
(* Blockchain *)
| C_CALL -> fprintf ppf "CALL"
| C_CONTRACT -> fprintf ppf "CONTRACT"
| C_CONTRACT_OPT -> fprintf ppf "CONTRACT_OPT"
| C_CONTRACT_ENTRYPOINT -> fprintf ppf "CONTRACT_ENTRYPOINT"
| C_CONTRACT_ENTRYPOINT_OPT -> fprintf ppf "CONTRACT_ENTRYPOINT_OPT"
| C_AMOUNT -> fprintf ppf "AMOUNT"
| C_BALANCE -> fprintf ppf "BALANCE"
| C_SOURCE -> fprintf ppf "SOURCE"
| C_SENDER -> fprintf ppf "SENDER"
| C_ADDRESS -> fprintf ppf "ADDRESS"
| C_SELF -> fprintf ppf "SELF"
| C_SELF_ADDRESS -> fprintf ppf "SELF_ADDRESS"
| C_IMPLICIT_ACCOUNT -> fprintf ppf "IMPLICIT_ACCOUNT"
| C_SET_DELEGATE -> fprintf ppf "SET_DELEGATE"
| C_CREATE_CONTRACT -> fprintf ppf "CREATE_CONTRACT"

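Aside, for illustration only (not part of this commit): a printer written in this shape can be rendered to a plain string with Format.asprintf; the to_string helper below is hypothetical and just shows the idiom.

(* Hypothetical helper: render any %a-style printer to a string. *)
let to_string (pp : Format.formatter -> 'a -> unit) (x : 'a) : string =
  Format.asprintf "%a" pp x
let _ = to_string Format.pp_print_int 42  (* evaluates to "42" *)
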
let literal ppf (l : literal) =
match l with
| Literal_unit -> fprintf ppf "unit"
| Literal_void -> fprintf ppf "void"
| Literal_bool b -> fprintf ppf "%b" b
| Literal_int n -> fprintf ppf "%d" n
| Literal_nat n -> fprintf ppf "+%d" n
| Literal_timestamp n -> fprintf ppf "+%d" n
| Literal_mutez n -> fprintf ppf "%dmutez" n
| Literal_string s -> fprintf ppf "%S" s
| Literal_bytes b -> fprintf ppf "0x%a" Hex.pp (Hex.of_bytes b)
| Literal_address s -> fprintf ppf "@%S" s
| Literal_operation _ -> fprintf ppf "Operation(...bytes)"
| Literal_key s -> fprintf ppf "key %s" s
| Literal_key_hash s -> fprintf ppf "key_hash %s" s
| Literal_signature s -> fprintf ppf "Signature %s" s
| Literal_chain_id s -> fprintf ppf "Chain_id %s" s

let type_variable ppf (t : type_variable) : unit = fprintf ppf "%a" Var.pp t

and type_constant ppf (tc : type_constant) : unit =
let s =
match tc with
| TC_unit -> "unit"
| TC_string -> "string"
| TC_bytes -> "bytes"
| TC_nat -> "nat"
| TC_int -> "int"
| TC_mutez -> "mutez"
| TC_bool -> "bool"
| TC_operation -> "operation"
| TC_address -> "address"
| TC_key -> "key"
| TC_key_hash -> "key_hash"
| TC_signature -> "signature"
| TC_timestamp -> "timestamp"
| TC_chain_id -> "chain_id"
| TC_void -> "void"
in
fprintf ppf "%s" s

open Format

let rec type_expression' :
(formatter -> type_expression -> unit)
-> formatter
-> type_expression
-> unit =
fun f ppf te ->
match te.type_content with
| T_sum m -> fprintf ppf "@[<hv 4>sum[%a]@]" (cmap_sep_d f) m
| T_record m -> fprintf ppf "%a" (tuple_or_record_sep_type f) m
| T_arrow a -> fprintf ppf "%a -> %a" f a.type1 f a.type2
| T_variable tv -> type_variable ppf tv
| T_constant tc -> type_constant ppf tc
| T_operator to_ -> type_operator f ppf to_

and type_expression ppf (te : type_expression) : unit =
type_expression' type_expression ppf te

and type_operator :
(formatter -> type_expression -> unit)
-> formatter
-> type_operator
-> unit =
fun f ppf to_ ->
let s =
match to_ with
| TC_option te -> Format.asprintf "option(%a)" f te
| TC_list te -> Format.asprintf "list(%a)" f te
| TC_set te -> Format.asprintf "set(%a)" f te
| TC_map {k; v} -> Format.asprintf "Map (%a,%a)" f k f v
| TC_big_map {k; v} -> Format.asprintf "Big Map (%a,%a)" f k f v
| TC_map_or_big_map {k; v} -> Format.asprintf "Map Or Big Map (%a,%a)" f k f v
| TC_michelson_or {l; r} -> Format.asprintf "michelson_or (%a,%a)" f l f r
| TC_arrow {type1; type2} -> Format.asprintf "arrow (%a,%a)" f type1 f type2
| TC_contract te -> Format.asprintf "Contract (%a)" f te
in
fprintf ppf "(TO_%s)" s
(* end include Stage_common.PP *)

let expression_variable ppf (ev : expression_variable) : unit =
fprintf ppf "%a" Var.pp ev

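For intuition, a minimal self-contained sketch (not from this commit) of how %a printers like the ones above compose: a printer is just a formatter -> 'a -> unit function, so a printer for a compound value takes the printers of its parts as arguments.

(* Illustrative only: composing Format printers in the same %a style. *)
open Format
let pp_int ppf i = fprintf ppf "%d" i
let pp_pair pp_a pp_b ppf (a, b) = fprintf ppf "(%a , %a)" pp_a a pp_b b
let () = printf "%a@." (pp_pair pp_int pp_int) (1, 2)  (* prints "(1 , 2)" *)
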
@ -46,10 +281,10 @@ and expression_content ppf (ec: expression_content) =
type_expression fun_type
expression_content (E_lambda lambda)

and assoc_expression ppf : expr * expr -> unit =
fun (a, b) -> fprintf ppf "%a -> %a" expression a expression b
and assoc_expression ppf : map_kv -> unit =
fun {k ; v} -> fprintf ppf "%a -> %a" expression k expression v

and single_record_patch ppf ((p, expr) : label * expr) =
and single_record_patch ppf ((p, expr) : label * expression) =
fprintf ppf "%a <- %a" label p expression expr


@ -59,26 +294,26 @@ and option_inline ppf inline =
else
fprintf ppf ""

and matching_variant_case : type a . (_ -> a -> unit) -> _ -> (constructor' * expression_variable) * a -> unit =
fun f ppf ((c,n),a) ->
fprintf ppf "| %a %a -> %a" constructor c expression_variable n f a
and matching_variant_case : (_ -> expression -> unit) -> _ -> matching_content_case -> unit =
fun f ppf {constructor=c; pattern; body} ->
fprintf ppf "| %a %a -> %a" constructor c expression_variable pattern f body

and matching : type a . (formatter -> a -> unit) -> _ -> (a, 'var) matching_content -> unit = fun f ppf m -> match m with
| Match_tuple ((lst, b),_) ->
fprintf ppf "let (%a) = %a" (list_sep_d expression_variable) lst f b
| Match_variant (lst, _) ->
fprintf ppf "%a" (list_sep (matching_variant_case f) (tag "@.")) lst
and matching : (formatter -> expression -> unit) -> _ -> matching_expr -> unit = fun f ppf m -> match m with
| Match_tuple {vars; body; tvs=_} ->
fprintf ppf "let (%a) = %a" (list_sep_d expression_variable) vars f body
| Match_variant {cases ; tv=_} ->
fprintf ppf "%a" (list_sep (matching_variant_case f) (tag "@.")) cases
| Match_bool {match_true ; match_false} ->
fprintf ppf "| True -> %a @.| False -> %a" f match_true f match_false
| Match_list {match_nil ; match_cons = (hd_name, tl_name, match_cons, _)} ->
fprintf ppf "| Nil -> %a @.| %a :: %a -> %a" f match_nil expression_variable hd_name expression_variable tl_name f match_cons
| Match_option {match_none ; match_some = (some, match_some, _)} ->
fprintf ppf "| None -> %a @.| Some %a -> %a" f match_none expression_variable some f match_some
| Match_list {match_nil ; match_cons = {hd; tl; body; tv=_}} ->
fprintf ppf "| Nil -> %a @.| %a :: %a -> %a" f match_nil expression_variable hd expression_variable tl f body
| Match_option {match_none ; match_some = {opt; body; tv=_}} ->
fprintf ppf "| None -> %a @.| Some %a -> %a" f match_none expression_variable opt f body

let declaration ppf (d : declaration) =
match d with
| Declaration_constant (name, expr, inline,_) ->
fprintf ppf "const %a = %a%a" expression_variable name expression expr option_inline inline
| Declaration_constant {binder; expr; inline; post_env=_} ->
fprintf ppf "const %a = %a%a" expression_variable binder expression expr option_inline inline

let program ppf (p : program) =
fprintf ppf "@[<v>%a@]"

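The recurring change in this hunk, and in the rest of the commit, is the move from positional tuples to labelled records for AST payloads (Declaration_constant, match cases, map arguments). A toy sketch of that migration, with hypothetical names, is:

(* Illustrative only: tuple payload replaced by a labelled record. *)
type declaration_old = Decl of string * int * bool
type declaration_new = { binder : string ; expr : int ; inline : bool }
let name_of_old (Decl (n, _, _)) = n
let name_of_new { binder ; expr = _ ; inline = _ } = binder
let _ = (name_of_old (Decl ("x", 1, false)), name_of_new { binder = "x" ; expr = 1 ; inline = false })

The record form makes call sites self-describing and lets new fields be added without touching every pattern that ignores them.
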
src/stages/4-ast_typed/PP_generic.ml (new file, 42 lines)
@ -0,0 +1,42 @@
open Types
open Fold
open Format

let print_program : formatter -> program -> unit = fun ppf p ->
ignore ppf ;
let assert_nostate _ = () in (* (needs_parens, state) = assert (not needs_parens && match state with None -> true | Some _ -> false) in *)
let nostate = false, "" in
let op = {
generic = (fun state info ->
assert_nostate state;
match info.node_instance.instance_kind with
| RecordInstance { fields } ->
false, "{ " ^ String.concat " ; " (List.map (fun (fld : 'x Adt_info.ctor_or_field_instance) -> fld.cf.name ^ " = " ^ snd (fld.cf_continue nostate)) fields) ^ " }"
| VariantInstance { constructor={ cf = { name; is_builtin=_; type_=_ }; cf_continue }; variant=_ } ->
(match cf_continue nostate with
| true, arg -> true, name ^ " (" ^ arg ^ ")"
| false, arg -> true, name ^ " " ^ arg)
| PolyInstance { poly=_; arguments=_; poly_continue } ->
(poly_continue nostate)
);
type_variable = (fun _visitor state type_meta -> assert_nostate state; false , (ignore type_meta;"TODO:TYPE_META")) ;
type_meta = (fun _visitor state type_meta -> assert_nostate state; false , (ignore type_meta;"TODO:TYPE_META")) ;
bool = (fun _visitor state b -> assert_nostate state; false , if b then "true" else "false") ;
int = (fun _visitor state i -> assert_nostate state; false , string_of_int i) ;
string = (fun _visitor state str -> assert_nostate state; false , "\"" ^ str ^ "\"") ;
bytes = (fun _visitor state bytes -> assert_nostate state; false , (ignore bytes;"TODO:BYTES")) ;
packed_internal_operation = (fun _visitor state op -> assert_nostate state; false , (ignore op;"TODO:PACKED_INTERNAL_OPERATION")) ;
expression_variable = (fun _visitor state ev -> assert_nostate state; false , (ignore ev;"TODO:EXPRESSION_VARIABLE")) ;
constructor' = (fun _visitor state c -> assert_nostate state; false , (ignore c;"TODO:CONSTRUCTOR'")) ;
location = (fun _visitor state loc -> assert_nostate state; false , (ignore loc;"TODO:LOCATION'")) ;
label = (fun _visitor state (Label lbl) -> assert_nostate state; true, "Label " ^ lbl) ;
constructor_map = (fun _visitor continue state cmap -> assert_nostate state; false , (ignore (continue,cmap);"TODO:constructor_map")) ;
label_map = (fun _visitor continue state lmap -> assert_nostate state; false , (ignore (continue,lmap);"TODO:label_map")) ;
list = (fun _visitor continue state lst ->
assert_nostate state;
false , "[ " ^ String.concat " ; " (List.map snd @@ List.map (continue nostate) lst) ^ " ]") ;
location_wrap = (fun _visitor continue state lwrap -> assert_nostate state; false , (ignore (continue,lwrap);"TODO:location_wrap")) ;
list_ne = (fun _visitor continue state list_ne -> assert_nostate state; false , (ignore (continue,list_ne);"TODO:location_wrap")) ;
} in
let (_ , state) = fold__program op nostate p in
Printf.printf "%s" state
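The new printer above drives the generated fold with a record of per-node handlers instead of a hand-written recursive match. A small self-contained analogue of that handler-record style, with invented types, looks like this:

(* Illustrative only: a fold parameterised by a record of handlers. *)
type 'a tree = Leaf of 'a | Node of 'a tree * 'a tree
type 'a handlers = { leaf : 'a -> string ; node : string -> string -> string }
let rec fold_tree (h : 'a handlers) (t : 'a tree) : string =
  match t with
  | Leaf x -> h.leaf x
  | Node (l, r) -> h.node (fold_tree h l) (fold_tree h r)
let () =
  print_endline
    (fold_tree { leaf = string_of_int ; node = (fun l r -> "(" ^ l ^ " " ^ r ^ ")") }
       (Node (Leaf 1, Leaf 2)))  (* prints "(1 2)" *)
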
@ -9,6 +9,7 @@ module Misc = struct
include Misc
include Misc_smart
end
module Helpers = Helpers

include Types
include Misc

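The re-exports above rely on OCaml's include to flatten one module into another. A tiny sketch of the idiom, with made-up module names:

(* Illustrative only: include-based re-export. *)
module Misc_demo = struct let answer = 42 end
module Api_demo = struct
  include Misc_demo
  let doubled = answer * 2
end
let _ = (Api_demo.answer, Api_demo.doubled)
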
@ -62,9 +62,9 @@ let ez_t_record lst ?s () : type_expression =
t_record m ?s ()
let t_pair a b ?s () : type_expression = ez_t_record [(Label "0",a) ; (Label "1",b)] ?s ()

let t_map key value ?s () = make_t (T_operator (TC_map (key , value))) s
let t_big_map key value ?s () = make_t (T_operator (TC_big_map (key , value))) s
let t_map_or_big_map key value ?s () = make_t (T_operator (TC_map_or_big_map (key,value))) s
let t_map k v ?s () = make_t (T_operator (TC_map { k ; v })) s
let t_big_map k v ?s () = make_t (T_operator (TC_big_map { k ; v })) s
let t_map_or_big_map k v ?s () = make_t (T_operator (TC_map_or_big_map { k ; v })) s

let t_sum m ?s () : type_expression = make_t (T_sum m) s
let make_t_ez_sum (lst:(constructor' * type_expression) list) : type_expression =
@ -190,14 +190,14 @@ let get_t_record (t:type_expression) : type_expression label_map result = match

let get_t_map (t:type_expression) : (type_expression * type_expression) result =
match t.type_content with
| T_operator (TC_map (k,v)) -> ok (k, v)
| T_operator (TC_map_or_big_map (k,v)) -> ok (k, v)
| T_operator (TC_map { k ; v }) -> ok (k, v)
| T_operator (TC_map_or_big_map { k ; v }) -> ok (k, v)
| _ -> fail @@ Errors.not_a_x_type "map" t ()

let get_t_big_map (t:type_expression) : (type_expression * type_expression) result =
match t.type_content with
| T_operator (TC_big_map (k,v)) -> ok (k, v)
| T_operator (TC_map_or_big_map (k,v)) -> ok (k, v)
| T_operator (TC_big_map { k ; v }) -> ok (k, v)
| T_operator (TC_map_or_big_map { k ; v }) -> ok (k, v)
| _ -> fail @@ Errors.not_a_x_type "big_map" t ()

let get_t_map_key : type_expression -> type_expression result = fun t ->
@ -341,7 +341,7 @@ let get_a_record_accessor = fun t ->
let get_declaration_by_name : program -> string -> declaration result = fun p name ->
let aux : declaration -> bool = fun declaration ->
match declaration with
| Declaration_constant (d, _, _, _) -> d = Var.of_name name
| Declaration_constant { binder ; expr=_ ; inline=_ ; post_env=_ } -> binder = Var.of_name name
in
trace_option (Errors.declaration_not_found name ()) @@
List.find_opt aux @@ List.map Location.unwrap p

@ -126,7 +126,7 @@ val e_chain_id : string -> expression_content
val e_operation : Memory_proto_alpha.Protocol.Alpha_context.packed_internal_operation -> expression_content
val e_lambda : lambda -> expression_content
val e_pair : expression -> expression -> expression_content
val e_application : expression -> expr -> expression_content
val e_application : expression -> expression -> expression_content
val e_variable : expression_variable -> expression_content
val e_let_in : expression_variable -> inline -> expression -> expression -> expression_content

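The smart constructors in this hunk keep the `?s ()` shape: an optional argument must be followed by a positional argument to be erasable, and the trailing unit plays that role. A minimal sketch of the idiom, unrelated to the LIGO API:

(* Illustrative only: optional argument terminated by unit. *)
let greet ?(name = "world") () = "hello, " ^ name
let _ = greet ()               (* "hello, world" *)
let _ = greet ~name:"ligo" ()  (* "hello, ligo" *)
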
@ -1,3 +1,10 @@
(rule
(target generated_fold.ml)
(deps ../adt_generator/generator.raku types.ml)
(action (with-stdout-to generated_fold.ml (run perl6 ../adt_generator/generator.raku types.ml)))
; (mode (promote (until-clean)))
)

(library
(name ast_typed)
(public_name ligo.ast_typed)
@ -6,6 +13,7 @@
tezos-utils
ast_core ; Is that a good idea?
stage_common
adt_generator
)
(preprocess
(pps ppx_let bisect_ppx --conditional)

@ -43,12 +43,14 @@ let add_type : type_variable -> type_expression -> t -> t = fun k v -> List.Ne.h
let get_opt : expression_variable -> t -> element option = fun k x -> List.Ne.find_map (Small.get_opt k) x
let get_type_opt : type_variable -> t -> type_expression option = fun k x -> List.Ne.find_map (Small.get_type_opt k) x

let get_constructor : constructor' -> t -> (type_expression * type_expression) option = fun k x -> (* Left is the constructor, right is the sum type *)
let convert_constructor' (S.Constructor c) = Constructor c

let get_constructor : Ast_core.constructor' -> t -> (type_expression * type_expression) option = fun k x -> (* Left is the constructor, right is the sum type *)
let aux = fun x ->
let aux = fun {type_variable=_ ; type_} ->
match type_.type_content with
| T_sum m ->
(match CMap.find_opt k m with
(match CMap.find_opt (convert_constructor' k) m with
Some km -> Some (km , type_)
| None -> None)
| _ -> None

@ -14,7 +14,7 @@ val add_ez_ae : expression_variable -> expression -> t -> t
val add_type : type_variable -> type_expression -> t -> t
val get_opt : expression_variable -> t -> element option
val get_type_opt : type_variable -> t -> type_expression option
val get_constructor : constructor' -> t -> (type_expression * type_expression) option
val get_constructor : Ast_core.constructor' -> t -> (type_expression * type_expression) option

module Small : sig
type t = small_environment

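The new convert_constructor' above bridges two structurally identical constructor types that live in different compiler stages. A stripped-down sketch of that conversion pattern, with hypothetical modules:

(* Illustrative only: converting between same-shaped constructors of two modules. *)
module Core_demo = struct type ctor = Constructor of string end
module Typed_demo = struct type ctor = Constructor of string end
let convert_ctor (Core_demo.Constructor c) : Typed_demo.ctor = Typed_demo.Constructor c
let _ = convert_ctor (Core_demo.Constructor "Increment")
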
src/stages/4-ast_typed/fold.ml (new file, 1 line)
@ -0,0 +1 @@
include Generated_fold

src/stages/4-ast_typed/helpers.ml (new file, 165 lines)
@ -0,0 +1,165 @@
|
||||
open Types
|
||||
open Trace
|
||||
|
||||
let map_type_operator f = function
|
||||
TC_contract x -> TC_contract (f x)
|
||||
| TC_option x -> TC_option (f x)
|
||||
| TC_list x -> TC_list (f x)
|
||||
| TC_set x -> TC_set (f x)
|
||||
| TC_map {k ; v} -> TC_map { k = f k ; v = f v }
|
||||
| TC_big_map {k ; v}-> TC_big_map { k = f k ; v = f v }
|
||||
| TC_map_or_big_map { k ; v }-> TC_map_or_big_map { k = f k ; v = f v }
|
||||
| TC_michelson_or { l ; r } -> TC_michelson_or { l = f l ; r = f r }
|
||||
| TC_arrow {type1 ; type2} -> TC_arrow { type1 = f type1 ; type2 = f type2 }
|
||||
|
||||
let bind_map_type_operator f = function
|
||||
TC_contract x -> let%bind x = f x in ok @@ TC_contract x
|
||||
| TC_option x -> let%bind x = f x in ok @@ TC_option x
|
||||
| TC_list x -> let%bind x = f x in ok @@ TC_list x
|
||||
| TC_set x -> let%bind x = f x in ok @@ TC_set x
|
||||
| TC_map {k ; v} -> let%bind k = f k in let%bind v = f v in ok @@ TC_map {k ; v}
|
||||
| TC_big_map {k ; v} -> let%bind k = f k in let%bind v = f v in ok @@ TC_big_map {k ; v}
|
||||
| TC_map_or_big_map {k ; v} -> let%bind k = f k in let%bind v = f v in ok @@ TC_map_or_big_map {k ; v}
|
||||
| TC_michelson_or {l ; r}-> let%bind l = f l in let%bind r = f r in ok @@ TC_michelson_or {l ; r}
|
||||
| TC_arrow {type1 ; type2}-> let%bind type1 = f type1 in let%bind type2 = f type2 in ok @@ TC_arrow {type1 ; type2}
|
||||
|
||||
let type_operator_name = function
|
||||
TC_contract _ -> "TC_contract"
|
||||
| TC_option _ -> "TC_option"
|
||||
| TC_list _ -> "TC_list"
|
||||
| TC_set _ -> "TC_set"
|
||||
| TC_map _ -> "TC_map"
|
||||
| TC_big_map _ -> "TC_big_map"
|
||||
| TC_map_or_big_map _ -> "TC_map_or_big_map"
|
||||
| TC_michelson_or _ -> "TC_michelson_or"
|
||||
| TC_arrow _ -> "TC_arrow"
|
||||
|
||||
let type_expression'_of_string = function
|
||||
| "TC_contract" , [x] -> ok @@ T_operator(TC_contract x)
|
||||
| "TC_option" , [x] -> ok @@ T_operator(TC_option x)
|
||||
| "TC_list" , [x] -> ok @@ T_operator(TC_list x)
|
||||
| "TC_set" , [x] -> ok @@ T_operator(TC_set x)
|
||||
| "TC_map" , [k ; v] -> ok @@ T_operator(TC_map { k ; v })
|
||||
| "TC_big_map" , [k ; v] -> ok @@ T_operator(TC_big_map { k ; v })
|
||||
| ("TC_contract" | "TC_option" | "TC_list" | "TC_set" | "TC_map" | "TC_big_map"), _ ->
|
||||
failwith "internal error: wrong number of arguments for type operator"
|
||||
|
||||
| "TC_unit" , [] -> ok @@ T_constant(TC_unit)
|
||||
| "TC_string" , [] -> ok @@ T_constant(TC_string)
|
||||
| "TC_bytes" , [] -> ok @@ T_constant(TC_bytes)
|
||||
| "TC_nat" , [] -> ok @@ T_constant(TC_nat)
|
||||
| "TC_int" , [] -> ok @@ T_constant(TC_int)
|
||||
| "TC_mutez" , [] -> ok @@ T_constant(TC_mutez)
|
||||
| "TC_bool" , [] -> ok @@ T_constant(TC_bool)
|
||||
| "TC_operation" , [] -> ok @@ T_constant(TC_operation)
|
||||
| "TC_address" , [] -> ok @@ T_constant(TC_address)
|
||||
| "TC_key" , [] -> ok @@ T_constant(TC_key)
|
||||
| "TC_key_hash" , [] -> ok @@ T_constant(TC_key_hash)
|
||||
| "TC_chain_id" , [] -> ok @@ T_constant(TC_chain_id)
|
||||
| "TC_signature" , [] -> ok @@ T_constant(TC_signature)
|
||||
| "TC_timestamp" , [] -> ok @@ T_constant(TC_timestamp)
|
||||
| _, [] ->
|
||||
failwith "internal error: wrong number of arguments for type constant"
|
||||
| _ ->
|
||||
failwith "internal error: unknown type operator"
|
||||
|
||||
let string_of_type_operator = function
|
||||
| TC_contract x -> "TC_contract" , [x]
|
||||
| TC_option x -> "TC_option" , [x]
|
||||
| TC_list x -> "TC_list" , [x]
|
||||
| TC_set x -> "TC_set" , [x]
|
||||
| TC_map { k ; v } -> "TC_map" , [k ; v]
|
||||
| TC_big_map { k ; v } -> "TC_big_map" , [k ; v]
|
||||
| TC_map_or_big_map { k ; v } -> "TC_map_or_big_map" , [k ; v]
|
||||
| TC_michelson_or { l ; r } -> "TC_michelson_or" , [l ; r]
|
||||
| TC_arrow { type1 ; type2 } -> "TC_arrow" , [type1 ; type2]
|
||||
|
||||
let string_of_type_constant = function
|
||||
| TC_unit -> "TC_unit", []
|
||||
| TC_string -> "TC_string", []
|
||||
| TC_bytes -> "TC_bytes", []
|
||||
| TC_nat -> "TC_nat", []
|
||||
| TC_int -> "TC_int", []
|
||||
| TC_mutez -> "TC_mutez", []
|
||||
| TC_bool -> "TC_bool", []
|
||||
| TC_operation -> "TC_operation", []
|
||||
| TC_address -> "TC_address", []
|
||||
| TC_key -> "TC_key", []
|
||||
| TC_key_hash -> "TC_key_hash", []
|
||||
| TC_chain_id -> "TC_chain_id", []
|
||||
| TC_signature -> "TC_signature", []
|
||||
| TC_timestamp -> "TC_timestamp", []
|
||||
| TC_void -> "TC_void", []
|
||||
|
||||
let string_of_type_expression' = function
|
||||
| T_operator o -> string_of_type_operator o
|
||||
| T_constant c -> string_of_type_constant c
|
||||
| T_sum _ | T_record _ | T_arrow _ | T_variable _ ->
|
||||
failwith "not a type operator or constant"
|
||||
|
||||
let bind_lmap (l:_ label_map) =
|
||||
let open Trace in
|
||||
let open LMap in
|
||||
let aux k v prev =
|
||||
prev >>? fun prev' ->
|
||||
v >>? fun v' ->
|
||||
ok @@ add k v' prev' in
|
||||
fold aux l (ok empty)
|
||||
|
||||
let bind_cmap (c:_ constructor_map) =
|
||||
let open Trace in
|
||||
let open CMap in
|
||||
let aux k v prev =
|
||||
prev >>? fun prev' ->
|
||||
v >>? fun v' ->
|
||||
ok @@ add k v' prev' in
|
||||
fold aux c (ok empty)
|
||||
|
||||
let bind_fold_lmap f init (lmap:_ LMap.t) =
|
||||
let open Trace in
|
||||
let aux k v prev =
|
||||
prev >>? fun prev' ->
|
||||
f prev' k v
|
||||
in
|
||||
LMap.fold aux lmap init
|
||||
|
||||
let bind_map_lmap f map = bind_lmap (LMap.map f map)
|
||||
let bind_map_cmap f map = bind_cmap (CMap.map f map)
|
||||
let bind_map_lmapi f map = bind_lmap (LMap.mapi f map)
|
||||
let bind_map_cmapi f map = bind_cmap (CMap.mapi f map)
|
||||
|
||||
let range i j =
|
||||
let rec aux i j acc = if i >= j then acc else aux i (j-1) (j-1 :: acc) in
|
||||
aux i j []
|
||||
|
||||
let label_range i j =
|
||||
List.map (fun i -> Label (string_of_int i)) @@ range i j
|
||||
|
||||
let is_tuple_lmap m =
|
||||
List.for_all (fun i -> LMap.mem i m) @@ (label_range 0 (LMap.cardinal m))
|
||||
|
||||
let get_pair m =
|
||||
let open Trace in
|
||||
match (LMap.find_opt (Label "0") m , LMap.find_opt (Label "1") m) with
|
||||
| Some e1, Some e2 -> ok (e1,e2)
|
||||
| _ -> simple_fail "not a pair"
|
||||
|
||||
let tuple_of_record (m: _ LMap.t) =
|
||||
let aux i =
|
||||
let label = Label (string_of_int i) in
|
||||
let opt = LMap.find_opt (label) m in
|
||||
Option.bind (fun opt -> Some ((label,opt),i+1)) opt
|
||||
in
|
||||
Base.Sequence.to_list @@ Base.Sequence.unfold ~init:0 ~f:aux
|
||||
|
||||
let list_of_record_or_tuple (m: _ LMap.t) =
|
||||
if (is_tuple_lmap m) then
|
||||
List.map snd @@ tuple_of_record m
|
||||
else
|
||||
List.rev @@ LMap.to_list m
|
||||
|
||||
let kv_list_of_record_or_tuple (m: _ LMap.t) =
|
||||
if (is_tuple_lmap m) then
|
||||
tuple_of_record m
|
||||
else
|
||||
List.rev @@ LMap.to_kv_list m
|
@ -1,5 +1,6 @@
|
||||
open Trace
|
||||
open Types
|
||||
open Helpers
|
||||
|
||||
module Errors = struct
|
||||
let different_kinds a b () =
|
||||
@ -53,7 +54,7 @@ module Errors = struct
|
||||
error ~data title message ()
|
||||
|
||||
let different_props_in_record a b ra rb ka kb () =
|
||||
let names () = if Stage_common.Helpers.is_tuple_lmap ra && Stage_common.Helpers.is_tuple_lmap rb then "tuples" else "records" in
|
||||
let names () = if Helpers.is_tuple_lmap ra && Helpers.is_tuple_lmap rb then "tuples" else "records" in
|
||||
let title () = "different keys in " ^ (names ()) in
|
||||
let message () = "" in
|
||||
let data = [
|
||||
@ -65,8 +66,8 @@ module Errors = struct
|
||||
error ~data title message ()
|
||||
|
||||
let different_kind_record_tuple a b ra rb () =
|
||||
let name_a () = if Stage_common.Helpers.is_tuple_lmap ra then "tuple" else "record" in
|
||||
let name_b () = if Stage_common.Helpers.is_tuple_lmap rb then "tuple" else "record" in
|
||||
let name_a () = if Helpers.is_tuple_lmap ra then "tuple" else "record" in
|
||||
let name_b () = if Helpers.is_tuple_lmap rb then "tuple" else "record" in
|
||||
let title () = "different keys in " ^ (name_a ()) ^ " and " ^ (name_b ()) in
|
||||
let message () = "Expected these two types to be the same, but they're different (one is a " ^ (name_a ()) ^ " and the other is a " ^ (name_b ()) ^ ")" in
|
||||
let data = [
|
||||
@ -82,7 +83,7 @@ module Errors = struct
|
||||
|
||||
let different_size_records_tuples a b ra rb =
|
||||
different_size_type
|
||||
(if Stage_common.Helpers.is_tuple_lmap ra && Stage_common.Helpers.is_tuple_lmap rb
|
||||
(if Helpers.is_tuple_lmap ra && Helpers.is_tuple_lmap rb
|
||||
then "tuples"
|
||||
else "records")
|
||||
a b
|
||||
@ -228,17 +229,17 @@ module Free_variables = struct
|
||||
and expression : bindings -> expression -> bindings = fun b e ->
|
||||
expression_content b e.expression_content
|
||||
|
||||
and matching_variant_case : type a . (bindings -> a -> bindings) -> bindings -> ((constructor' * expression_variable) * a) -> bindings = fun f b ((_,n),c) ->
|
||||
f (union (singleton n) b) c
|
||||
and matching_variant_case : (bindings -> expression -> bindings) -> bindings -> matching_content_case -> bindings = fun f b { constructor=_ ; pattern ; body } ->
|
||||
f (union (singleton pattern) b) body
|
||||
|
||||
and matching : type a . (bindings -> a -> bindings) -> bindings -> (a,'var) matching_content -> bindings = fun f b m ->
|
||||
and matching : (bindings -> expression -> bindings) -> bindings -> matching_expr -> bindings = fun f b m ->
|
||||
match m with
|
||||
| Match_bool { match_true = t ; match_false = fa } -> union (f b t) (f b fa)
|
||||
| Match_list { match_nil = n ; match_cons = (hd, tl, c, _) } -> union (f b n) (f (union (of_list [hd ; tl]) b) c)
|
||||
| Match_option { match_none = n ; match_some = (opt, s, _) } -> union (f b n) (f (union (singleton opt) b) s)
|
||||
| Match_tuple ((lst , a), _) ->
|
||||
f (union (of_list lst) b) a
|
||||
| Match_variant (lst,_) -> unions @@ List.map (matching_variant_case f b) lst
|
||||
| Match_list { match_nil = n ; match_cons = {hd; tl; body; tv=_} } -> union (f b n) (f (union (of_list [hd ; tl]) b) body)
|
||||
| Match_option { match_none = n ; match_some = {opt; body; tv=_} } -> union (f b n) (f (union (singleton opt) b) body)
|
||||
| Match_tuple { vars ; body ; tvs=_ } ->
|
||||
f (union (of_list vars) b) body
|
||||
| Match_variant { cases ; tv=_ } -> unions @@ List.map (matching_variant_case f b) cases
|
||||
|
||||
and matching_expression = fun x -> matching expression x
|
||||
|
||||
@ -338,12 +339,13 @@ let rec assert_type_expression_eq (a, b: (type_expression * type_expression)) :
|
||||
| TC_list la, TC_list lb
|
||||
| TC_contract la, TC_contract lb
|
||||
| TC_set la, TC_set lb -> ok @@ ([la], [lb])
|
||||
| (TC_map (ka,va) | TC_map_or_big_map (ka,va)), (TC_map (kb,vb) | TC_map_or_big_map (kb,vb))
|
||||
| (TC_big_map (ka,va) | TC_map_or_big_map (ka,va)), (TC_big_map (kb,vb) | TC_map_or_big_map (kb,vb))
|
||||
-> ok @@ ([ka;va] ,[kb;vb])
|
||||
| TC_michelson_or (la,ra), TC_michelson_or (lb,rb) -> ok @@ ([la;ra] , [lb;rb])
|
||||
| (TC_option _ | TC_list _ | TC_contract _ | TC_set _ | TC_map _ | TC_big_map _ | TC_map_or_big_map _ | TC_arrow _ | TC_michelson_or _ ),
|
||||
(TC_option _ | TC_list _ | TC_contract _ | TC_set _ | TC_map _ | TC_big_map _ | TC_map_or_big_map _ | TC_arrow _ | TC_michelson_or _ ) -> fail @@ different_operators opa opb
|
||||
| (TC_map {k=ka;v=va} | TC_map_or_big_map {k=ka;v=va}), (TC_map {k=kb;v=vb} | TC_map_or_big_map {k=kb;v=vb})
|
||||
| (TC_big_map {k=ka;v=va} | TC_map_or_big_map {k=ka;v=va}), (TC_big_map {k=kb;v=vb} | TC_map_or_big_map {k=kb;v=vb})
|
||||
-> ok @@ ([ka;va] ,[kb;vb])
|
||||
| TC_michelson_or {l=la;r=ra}, TC_michelson_or {l=lb;r=rb} -> ok @@ ([la;ra] , [lb;rb])
|
||||
| (TC_option _ | TC_list _ | TC_contract _ | TC_set _ | TC_map _ | TC_big_map _ | TC_map_or_big_map _ | TC_arrow _| TC_michelson_or _ ),
|
||||
(TC_option _ | TC_list _ | TC_contract _ | TC_set _ | TC_map _ | TC_big_map _ | TC_map_or_big_map _ | TC_arrow _| TC_michelson_or _ )
|
||||
-> fail @@ different_operators opa opb
|
||||
in
|
||||
if List.length lsta <> List.length lstb then
|
||||
fail @@ different_operator_number_of_arguments opa opb (List.length lsta) (List.length lstb)
|
||||
@ -369,7 +371,7 @@ let rec assert_type_expression_eq (a, b: (type_expression * type_expression)) :
|
||||
)
|
||||
| T_sum _, _ -> fail @@ different_kinds a b
|
||||
| T_record ra, T_record rb
|
||||
when Stage_common.Helpers.is_tuple_lmap ra <> Stage_common.Helpers.is_tuple_lmap rb -> (
|
||||
when Helpers.is_tuple_lmap ra <> Helpers.is_tuple_lmap rb -> (
|
||||
fail @@ different_kind_record_tuple a b ra rb
|
||||
)
|
||||
| T_record ra, T_record rb -> (
|
||||
@ -489,7 +491,7 @@ let rec assert_value_eq (a, b: (expression*expression)) : unit result =
|
||||
| Some a, Some b -> Some (assert_value_eq (a, b))
|
||||
| _ -> Some (fail @@ missing_key_in_record_value k)
|
||||
in
|
||||
let%bind _all = Stage_common.Helpers.bind_lmap @@ LMap.merge aux sma smb in
|
||||
let%bind _all = Helpers.bind_lmap @@ LMap.merge aux sma smb in
|
||||
ok ()
|
||||
)
|
||||
| E_record _, _ ->
|
||||
@ -515,8 +517,8 @@ let merge_annotation (a:type_expression option) (b:type_expression option) err :
|
||||
let get_entry (lst : program) (name : string) : expression result =
|
||||
trace_option (Errors.missing_entry_point name) @@
|
||||
let aux x =
|
||||
let (Declaration_constant (an , expr, _, _)) = Location.unwrap x in
|
||||
if (an = Var.of_name name)
|
||||
let (Declaration_constant { binder ; expr ; inline=_ ; _ }) = Location.unwrap x in
|
||||
if Var.equal binder (Var.of_name name)
|
||||
then Some expr
|
||||
else None
|
||||
in
|
||||
@ -525,4 +527,4 @@ let get_entry (lst : program) (name : string) : expression result =
|
||||
let program_environment (program : program) : full_environment =
|
||||
let last_declaration = Location.unwrap List.(hd @@ rev program) in
|
||||
match last_declaration with
|
||||
| Declaration_constant (_ , _, _, post_env) -> post_env
|
||||
| Declaration_constant { binder=_ ; expr=_ ; inline=_ ; post_env } -> post_env
|
||||
|
@ -2,13 +2,13 @@ open Trace
|
||||
open Types
|
||||
open Combinators
|
||||
open Misc
|
||||
open Stage_common.Types
|
||||
(* open Stage_common.Types *)
|
||||
|
||||
let program_to_main : program -> string -> lambda result = fun p s ->
|
||||
let%bind (main , input_type , _) =
|
||||
let pred = fun d ->
|
||||
match d with
|
||||
| Declaration_constant (d , expr, _, _) when d = Var.of_name s -> Some expr
|
||||
| Declaration_constant { binder; expr; inline=_ ; post_env=_ } when binder = Var.of_name s -> Some expr
|
||||
| Declaration_constant _ -> None
|
||||
in
|
||||
let%bind main =
|
||||
@ -23,7 +23,7 @@ let program_to_main : program -> string -> lambda result = fun p s ->
|
||||
let env =
|
||||
let aux = fun _ d ->
|
||||
match d with
|
||||
| Declaration_constant (_ , _, _, post_env) -> post_env in
|
||||
| Declaration_constant {binder=_ ; expr= _ ; inline=_ ; post_env } -> post_env in
|
||||
List.fold_left aux Environment.full_empty (List.map Location.unwrap p) in
|
||||
let binder = Var.of_name "@contract_input" in
|
||||
let result =
|
||||
@ -86,27 +86,27 @@ module Captured_variables = struct
|
||||
let b' = union (singleton r.fun_name) b in
|
||||
expression_content b' env @@ E_lambda r.lambda
|
||||
|
||||
and matching_variant_case : type a . (bindings -> a -> bindings result) -> bindings -> ((constructor' * expression_variable) * a) -> bindings result = fun f b ((_,n),c) ->
|
||||
f (union (singleton n) b) c
|
||||
and matching_variant_case : (bindings -> expression -> bindings result) -> bindings -> matching_content_case -> bindings result = fun f b { constructor=_ ; pattern ; body } ->
|
||||
f (union (singleton pattern) b) body
|
||||
|
||||
and matching : type a . (bindings -> a -> bindings result) -> bindings -> (a, 'tv) matching_content -> bindings result = fun f b m ->
|
||||
and matching : (bindings -> expression -> bindings result) -> bindings -> matching_expr -> bindings result = fun f b m ->
|
||||
match m with
|
||||
| Match_bool { match_true = t ; match_false = fa } ->
|
||||
let%bind t' = f b t in
|
||||
let%bind fa' = f b fa in
|
||||
ok @@ union t' fa'
|
||||
| Match_list { match_nil = n ; match_cons = (hd, tl, c, _) } ->
|
||||
| Match_list { match_nil = n ; match_cons = {hd; tl; body; tv=_} } ->
|
||||
let%bind n' = f b n in
|
||||
let%bind c' = f (union (of_list [hd ; tl]) b) c in
|
||||
let%bind c' = f (union (of_list [hd ; tl]) b) body in
|
||||
ok @@ union n' c'
|
||||
| Match_option { match_none = n ; match_some = (opt, s, _) } ->
|
||||
| Match_option { match_none = n ; match_some = {opt; body; tv=_} } ->
|
||||
let%bind n' = f b n in
|
||||
let%bind s' = f (union (singleton opt) b) s in
|
||||
let%bind s' = f (union (singleton opt) b) body in
|
||||
ok @@ union n' s'
|
||||
| Match_tuple ((lst , a),_) ->
|
||||
f (union (of_list lst) b) a
|
||||
| Match_variant (lst , _) ->
|
||||
let%bind lst' = bind_map_list (matching_variant_case f b) lst in
|
||||
| Match_tuple { vars ; body ; tvs=_ } ->
|
||||
f (union (of_list vars) b) body
|
||||
| Match_variant { cases ; tv=_ } ->
|
||||
let%bind lst' = bind_map_list (matching_variant_case f b) cases in
|
||||
ok @@ unions lst'
|
||||
|
||||
and matching_expression = fun x -> matching expression x
|
||||
|
@ -6,7 +6,7 @@ val program_to_main : program -> string -> lambda result
|
||||
module Captured_variables : sig
|
||||
|
||||
type bindings = expression_variable list
|
||||
val matching : (bindings -> 'a -> bindings result) -> bindings -> ('a, type_expression) matching_content -> bindings result
|
||||
val matching : (bindings -> expression -> bindings result) -> bindings -> matching_expr -> bindings result
|
||||
|
||||
val matching_expression : bindings -> matching_expr -> bindings result
|
||||
|
||||
|
@ -1,17 +1,266 @@
|
||||
[@@@warning "-30"]
|
||||
|
||||
module S = Ast_core
|
||||
include Stage_common.Types
|
||||
include Types_utils
|
||||
|
||||
module Ast_typed_type_parameter = struct
|
||||
type type_meta = S.type_expression option
|
||||
end
|
||||
type type_constant =
|
||||
| TC_unit
|
||||
| TC_string
|
||||
| TC_bytes
|
||||
| TC_nat
|
||||
| TC_int
|
||||
| TC_mutez
|
||||
| TC_bool
|
||||
| TC_operation
|
||||
| TC_address
|
||||
| TC_key
|
||||
| TC_key_hash
|
||||
| TC_chain_id
|
||||
| TC_signature
|
||||
| TC_timestamp
|
||||
| TC_void
|
||||
|
||||
include Ast_generic_type (Ast_typed_type_parameter)
|
||||
type te_cmap = type_expression constructor_map
|
||||
and te_lmap = type_expression label_map
|
||||
|
||||
type program = declaration Location.wrap list
|
||||
and type_content =
|
||||
| T_sum of te_cmap
|
||||
| T_record of te_lmap
|
||||
| T_arrow of arrow
|
||||
| T_variable of type_variable
|
||||
| T_constant of type_constant
|
||||
| T_operator of type_operator
|
||||
|
||||
and inline = bool
|
||||
and arrow = {
|
||||
type1: type_expression;
|
||||
type2: type_expression;
|
||||
}
|
||||
|
||||
and type_map_args = {
|
||||
k : type_expression;
|
||||
v : type_expression;
|
||||
}
|
||||
|
||||
and michelson_or_args = {
|
||||
l : type_expression;
|
||||
r : type_expression;
|
||||
}
|
||||
|
||||
and type_operator =
|
||||
| TC_contract of type_expression
|
||||
| TC_option of type_expression
|
||||
| TC_list of type_expression
|
||||
| TC_set of type_expression
|
||||
| TC_map of type_map_args
|
||||
| TC_big_map of type_map_args
|
||||
| TC_map_or_big_map of type_map_args
|
||||
| TC_michelson_or of michelson_or_args
|
||||
| TC_arrow of arrow
|
||||
|
||||
|
||||
and type_expression = {
|
||||
type_content: type_content;
|
||||
type_meta: type_meta;
|
||||
}
|
||||
|
||||
type literal =
|
||||
| Literal_unit
|
||||
| Literal_bool of bool
|
||||
| Literal_int of int
|
||||
| Literal_nat of int
|
||||
| Literal_timestamp of int
|
||||
| Literal_mutez of int
|
||||
| Literal_string of string
|
||||
| Literal_bytes of bytes
|
||||
| Literal_address of string
|
||||
| Literal_signature of string
|
||||
| Literal_key of string
|
||||
| Literal_key_hash of string
|
||||
| Literal_chain_id of string
|
||||
| Literal_void
|
||||
| Literal_operation of packed_internal_operation
|
||||
|
||||
type matching_content_bool = {
|
||||
match_true : expression ;
|
||||
match_false : expression ;
|
||||
}
|
||||
|
||||
and matching_content_cons = {
|
||||
hd : expression_variable;
|
||||
tl : expression_variable;
|
||||
body : expression;
|
||||
tv : type_expression;
|
||||
}
|
||||
|
||||
and matching_content_list = {
|
||||
match_nil : expression ;
|
||||
match_cons : matching_content_cons;
|
||||
}
|
||||
|
||||
and matching_content_some = {
|
||||
opt : expression_variable ;
|
||||
body : expression ;
|
||||
tv : type_expression ;
|
||||
}
|
||||
|
||||
and matching_content_option = {
|
||||
match_none : expression ;
|
||||
match_some : matching_content_some ;
|
||||
}
|
||||
|
||||
and expression_variable_list = expression_variable list
|
||||
and type_expression_list = type_expression list
|
||||
|
||||
and matching_content_tuple = {
|
||||
vars : expression_variable_list ;
|
||||
body : expression ;
|
||||
tvs : type_expression_list ;
|
||||
}
|
||||
|
||||
and matching_content_case = {
|
||||
constructor : constructor' ;
|
||||
pattern : expression_variable ;
|
||||
body : expression ;
|
||||
}
|
||||
|
||||
and matching_content_case_list = matching_content_case list
|
||||
|
||||
and matching_content_variant = {
|
||||
cases: matching_content_case_list;
|
||||
tv: type_expression;
|
||||
}
|
||||
|
||||
and matching_expr =
|
||||
| Match_bool of matching_content_bool
|
||||
| Match_list of matching_content_list
|
||||
| Match_option of matching_content_option
|
||||
| Match_tuple of matching_content_tuple
|
||||
| Match_variant of matching_content_variant
|
||||
|
||||
and constant' =
|
||||
| C_INT
|
||||
| C_UNIT
|
||||
| C_NIL
|
||||
| C_NOW
|
||||
| C_IS_NAT
|
||||
| C_SOME
|
||||
| C_NONE
|
||||
| C_ASSERTION
|
||||
| C_ASSERT_INFERRED
|
||||
| C_FAILWITH
|
||||
| C_UPDATE
|
||||
(* Loops *)
|
||||
| C_ITER
|
||||
| C_FOLD_WHILE
|
||||
| C_FOLD_CONTINUE
|
||||
| C_FOLD_STOP
|
||||
| C_LOOP_LEFT
|
||||
| C_LOOP_CONTINUE
|
||||
| C_LOOP_STOP
|
||||
| C_FOLD
|
||||
(* MATH *)
|
||||
| C_NEG
|
||||
| C_ABS
|
||||
| C_ADD
|
||||
| C_SUB
|
||||
| C_MUL
|
||||
| C_EDIV
|
||||
| C_DIV
|
||||
| C_MOD
|
||||
(* LOGIC *)
|
||||
| C_NOT
|
||||
| C_AND
|
||||
| C_OR
|
||||
| C_XOR
|
||||
| C_LSL
|
||||
| C_LSR
|
||||
(* COMPARATOR *)
|
||||
| C_EQ
|
||||
| C_NEQ
|
||||
| C_LT
|
||||
| C_GT
|
||||
| C_LE
|
||||
| C_GE
|
||||
(* Bytes/ String *)
|
||||
| C_SIZE
|
||||
| C_CONCAT
|
||||
| C_SLICE
|
||||
| C_BYTES_PACK
|
||||
| C_BYTES_UNPACK
|
||||
| C_CONS
|
||||
(* Pair *)
|
||||
| C_PAIR
|
||||
| C_CAR
|
||||
| C_CDR
|
||||
| C_LEFT
|
||||
| C_RIGHT
|
||||
(* Set *)
|
||||
| C_SET_EMPTY
|
||||
| C_SET_LITERAL
|
||||
| C_SET_ADD
|
||||
| C_SET_REMOVE
|
||||
| C_SET_ITER
|
||||
| C_SET_FOLD
|
||||
| C_SET_MEM
|
||||
(* List *)
|
||||
| C_LIST_EMPTY
|
||||
| C_LIST_LITERAL
|
||||
| C_LIST_ITER
|
||||
| C_LIST_MAP
|
||||
| C_LIST_FOLD
|
||||
(* Maps *)
|
||||
| C_MAP
|
||||
| C_MAP_EMPTY
|
||||
| C_MAP_LITERAL
|
||||
| C_MAP_GET
|
||||
| C_MAP_GET_FORCE
|
||||
| C_MAP_ADD
|
||||
| C_MAP_REMOVE
|
||||
| C_MAP_UPDATE
|
||||
| C_MAP_ITER
|
||||
| C_MAP_MAP
|
||||
| C_MAP_FOLD
|
||||
| C_MAP_MEM
|
||||
| C_MAP_FIND
|
||||
| C_MAP_FIND_OPT
|
||||
(* Big Maps *)
|
||||
| C_BIG_MAP
|
||||
| C_BIG_MAP_EMPTY
|
||||
| C_BIG_MAP_LITERAL
|
||||
(* Crypto *)
|
||||
| C_SHA256
|
||||
| C_SHA512
|
||||
| C_BLAKE2b
|
||||
| C_HASH
|
||||
| C_HASH_KEY
|
||||
| C_CHECK_SIGNATURE
|
||||
| C_CHAIN_ID
|
||||
(* Blockchain *)
|
||||
| C_CALL
|
||||
| C_CONTRACT
|
||||
| C_CONTRACT_OPT
|
||||
| C_CONTRACT_ENTRYPOINT
|
||||
| C_CONTRACT_ENTRYPOINT_OPT
|
||||
| C_AMOUNT
|
||||
| C_BALANCE
|
||||
| C_SOURCE
|
||||
| C_SENDER
|
||||
| C_ADDRESS
|
||||
| C_SELF
|
||||
| C_SELF_ADDRESS
|
||||
| C_IMPLICIT_ACCOUNT
|
||||
| C_SET_DELEGATE
|
||||
| C_CREATE_CONTRACT
|
||||
|
||||
and declaration_loc = declaration location_wrap
|
||||
|
||||
and program = declaration_loc list
|
||||
|
||||
and declaration_constant = {
|
||||
binder : expression_variable ;
|
||||
expr : expression ;
|
||||
inline : bool ;
|
||||
post_env : full_environment ;
|
||||
}
|
||||
|
||||
and declaration =
|
||||
(* A Declaration_constant is described by
|
||||
@ -19,7 +268,7 @@ and declaration =
|
||||
* a boolean indicating whether it should be inlined
|
||||
* the environment before the declaration (the original environment)
|
||||
* the environment after the declaration (i.e. with that new declaration added to the original environment). *)
|
||||
| Declaration_constant of (expression_variable * expression * inline * full_environment)
|
||||
| Declaration_constant of declaration_constant
|
||||
(*
|
||||
| Declaration_type of (type_variable * type_expression)
|
||||
| Declaration_constant of (named_expression * (full_environment * full_environment))
|
||||
@ -28,11 +277,25 @@ and declaration =
|
||||
|
||||
and expression = {
|
||||
expression_content: expression_content ;
|
||||
location: Location.t ;
|
||||
location: location ;
|
||||
type_expression: type_expression ;
|
||||
environment: full_environment ;
|
||||
}
|
||||
|
||||
and map_kv = {
|
||||
k : expression ;
|
||||
v : expression ;
|
||||
}
|
||||
|
||||
and look_up = {
|
||||
ds : expression;
|
||||
ind : expression;
|
||||
}
|
||||
|
||||
and expression_label_map = expression label_map
|
||||
and map_kv_list = map_kv list
|
||||
and expression_list = expression list
|
||||
|
||||
and expression_content =
|
||||
(* Base *)
|
||||
| E_literal of literal
|
||||
@ -46,16 +309,17 @@ and expression_content =
|
||||
| E_constructor of constructor (* For user defined constructors *)
|
||||
| E_matching of matching
|
||||
(* Record *)
|
||||
| E_record of expression label_map
|
||||
| E_record of expression_label_map
|
||||
| E_record_accessor of record_accessor
|
||||
| E_record_update of record_update
|
||||
|
||||
and constant =
|
||||
{ cons_name: constant'
|
||||
; arguments: expression list }
|
||||
and constant = {
|
||||
cons_name: constant' ;
|
||||
arguments: expression_list ;
|
||||
}
|
||||
|
||||
and application = {
|
||||
lamb: expression ;
|
||||
lamb: expression ;
|
||||
args: expression ;
|
||||
}
|
||||
|
||||
@ -70,7 +334,7 @@ and let_in = {
|
||||
let_binder: expression_variable ;
|
||||
rhs: expression ;
|
||||
let_result: expression ;
|
||||
inline : inline ;
|
||||
inline : bool ;
|
||||
}
|
||||
|
||||
and recursive = {
|
||||
@ -95,10 +359,9 @@ and record_update = {
|
||||
update: expression ;
|
||||
}
|
||||
|
||||
and matching_expr = (expression,type_expression) matching_content
|
||||
and matching =
|
||||
{ matchee: expression
|
||||
; cases: matching_expr
|
||||
and matching = {
|
||||
matchee: expression ;
|
||||
cases: matching_expr ;
|
||||
}
|
||||
|
||||
and ascription = {
|
||||
@ -106,7 +369,6 @@ and ascription = {
|
||||
type_annotation: type_expression ;
|
||||
}
|
||||
|
||||
|
||||
and environment_element_definition =
|
||||
| ED_binder
|
||||
| ED_declaration of environment_element_definition_declaration
|
||||
@ -144,13 +406,10 @@ and small_environment = {
|
||||
type_environment: type_environment ;
|
||||
}
|
||||
|
||||
and full_environment = small_environment List.Ne.t
|
||||
|
||||
and expr = expression
|
||||
|
||||
and texpr = type_expression
|
||||
and full_environment = small_environment list_ne
|
||||
|
||||
and named_type_content = {
|
||||
type_name : type_variable;
|
||||
type_value : type_expression;
|
||||
}
|
||||
|
||||
|
src/stages/4-ast_typed/types_utils.ml (new file, 71 lines)
@ -0,0 +1,71 @@
module S = Ast_core
open Simple_utils.Trace

(* include Stage_common.Types *)
(* type expression_
* and expression_variable = expression_ Var.t
* type type_
* and type_variable = type_ Var.t *)
type expression_ = Stage_common.Types.expression_
type expression_variable = Stage_common.Types.expression_variable
type type_ = Stage_common.Types.type_
type type_variable = Stage_common.Types.type_variable

type constructor' =
| Constructor of string
type label =
| Label of string

module CMap = Map.Make( struct type t = constructor' let compare (Constructor a) (Constructor b) = compare a b end)
module LMap = Map.Make( struct type t = label let compare (Label a) (Label b) = String.compare a b end)

type 'a label_map = 'a LMap.t
type 'a constructor_map = 'a CMap.t
type type_meta = S.type_expression option

type 'a location_wrap = 'a Location.wrap
type 'a list_ne = 'a List.Ne.t
type packed_internal_operation = Memory_proto_alpha.Protocol.Alpha_context.packed_internal_operation
type location = Location.t
type inline = bool

let fold_map__constructor_map : type a new_a state . (state -> a -> (state * new_a) result) -> state -> a constructor_map -> (state * new_a constructor_map) result =
fun f state m ->
let aux k v acc =
let%bind (state , m) = acc in
let%bind (state , new_v) = f state v in
ok (state , CMap.add k new_v m) in
let%bind (state , m) = CMap.fold aux m (ok (state, CMap.empty)) in
ok (state , m)

let fold_map__label_map : type a state new_a . (state -> a -> (state * new_a) result) -> state -> a label_map -> (state * new_a label_map) result =
fun f state m ->
let aux k v acc =
let%bind (state , m) = acc in
let%bind (state , new_v) = f state v in
ok (state , LMap.add k new_v m) in
let%bind (state , m) = LMap.fold aux m (ok (state, LMap.empty)) in
ok (state , m)

let fold_map__list : type a state new_a . (state -> a -> (state * new_a) result) -> state -> a list -> (state * new_a list) Simple_utils.Trace.result =
fun f state l ->
let aux acc element =
let%bind state , l = acc in
let%bind (state , new_element) = f state element in ok (state , new_element :: l) in
let%bind (state , l) = List.fold_left aux (ok (state , [])) l in
ok (state , l)

let fold_map__location_wrap : type a state new_a . (state -> a -> (state * new_a) result) -> state -> a location_wrap -> (state * new_a location_wrap) Simple_utils.Trace.result =
fun f state { wrap_content ; location } ->
let%bind ( state , wrap_content ) = f state wrap_content in
ok (state , ({ wrap_content ; location } : new_a location_wrap))

let fold_map__list_ne : type a state new_a . (state -> a -> (state * new_a) result) -> state -> a list_ne -> (state * new_a list_ne) Simple_utils.Trace.result =
fun f state (first , l) ->
let%bind (state , new_first) = f state first in
let aux acc element =
let%bind state , l = acc in
let%bind (state , new_element) = f state element in
ok (state , new_element :: l) in
let%bind (state , l) = List.fold_left aux (ok (state , [])) l in
ok (state , (new_first , l))

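The fold_map__* helpers above all share one shape: thread a state value through a container while rebuilding it, inside the result monad. A plain, non-monadic version of that shape (a sketch, not the library code) looks like this:

(* Illustrative only: a stateful map over a list, without the result monad. *)
let fold_map f state l =
  let step (state, acc) x =
    let state, y = f state x in
    (state, y :: acc)
  in
  let state, rev = List.fold_left step (state, []) l in
  (state, List.rev rev)
let _ = fold_map (fun s x -> (s + x, string_of_int x)) 0 [1; 2; 3]
(* evaluates to (6, ["1"; "2"; "3"]) *)
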
@ -77,7 +77,7 @@ and expression = {
}

and constant = {
cons_name : constant'; (* this is at the end because it is huge *)
cons_name : constant';
arguments : expression list;
}

src/stages/adt_generator/.gitignore (new file, vendored, 2 lines)
@ -0,0 +1,2 @@
# This is an auto-generated test file
/generated_fold.ml
Some files were not shown because too many files have changed in this diff.