Merge repository ligo-utils
This commit is contained in:
commit
fd513af745
6
vendors/ligo-utils/.gitignore
vendored
Normal file
6
vendors/ligo-utils/.gitignore
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
_build/*
|
||||
*/_build
|
||||
.merlin
|
||||
*/.merlin
|
||||
*.install
|
||||
*/*.install
|
190
vendors/ligo-utils/proto-alpha-utils/cast.ml
vendored
Normal file
190
vendors/ligo-utils/proto-alpha-utils/cast.ml
vendored
Normal file
@ -0,0 +1,190 @@
|
||||
module Error_monad = X_error_monad
|
||||
open Tezos_micheline
|
||||
|
||||
let env = Error_monad.force_lwt ~msg:"Cast:init environment" @@ Init_proto_alpha.init_environment ()
|
||||
|
||||
open Memory_proto_alpha
|
||||
open Alpha_context
|
||||
|
||||
exception Expr_from_string
|
||||
let expr_of_string str =
|
||||
let (ast, errs) = Michelson_parser.V1.parse_expression ~check:false str in
|
||||
(match errs with
|
||||
| [] -> ()
|
||||
| lst -> (
|
||||
Format.printf "expr_from_string: %a\n" Error_monad.pp_print_error lst;
|
||||
raise Expr_from_string
|
||||
));
|
||||
ast.expanded
|
||||
|
||||
let tl_of_string str =
|
||||
let (ast, errs) = Michelson_parser.V1.parse_toplevel ~check:false str in
|
||||
(match errs with
|
||||
| [] -> ()
|
||||
| lst -> (
|
||||
Format.printf "expr_from_string: %a\n" Error_monad.pp_print_error lst;
|
||||
raise Expr_from_string
|
||||
));
|
||||
ast.expanded
|
||||
|
||||
let lexpr_of_string str =
|
||||
Script.lazy_expr @@ expr_of_string str
|
||||
|
||||
let ltl_of_string str =
|
||||
Script.lazy_expr @@ tl_of_string str
|
||||
|
||||
let node_of_string str =
|
||||
Micheline.root @@ expr_of_string str
|
||||
|
||||
let node_to_string (node:_ Micheline.node) =
|
||||
let stripped = Micheline.strip_locations node in
|
||||
let print_node = Micheline_printer.printable Michelson_v1_primitives.string_of_prim stripped in
|
||||
Micheline_printer.print_expr Format.str_formatter print_node ;
|
||||
Format.flush_str_formatter ()
|
||||
|
||||
open Script_ir_translator
|
||||
|
||||
let rec mapper (Ex_typed_value (ty, a)) =
|
||||
let open Alpha_environment.Error_monad in
|
||||
let open Script_typed_ir in
|
||||
let open Micheline in
|
||||
match ty, a with
|
||||
| Big_map_t (kt, vt, Some (`Type_annot "toto")), map ->
|
||||
let kt = ty_of_comparable_ty kt in
|
||||
fold_left_s
|
||||
(fun l (k, v) ->
|
||||
match v with
|
||||
| None -> return l
|
||||
| Some v -> (
|
||||
let key = data_to_node (Ex_typed_value (kt, k)) in
|
||||
let value = data_to_node (Ex_typed_value (vt, v)) in
|
||||
return (Prim (-1, Michelson_v1_primitives.D_Elt, [ key ; value ], []) :: l))
|
||||
)
|
||||
[]
|
||||
(map_fold (fun k v acc -> (k, v) :: acc) map.diff []) >>=? fun items ->
|
||||
return (Some (Micheline.Seq (-1, String (-1, "...") :: items)))
|
||||
| _ -> return None
|
||||
|
||||
and data_to_node (Ex_typed_value (ty, data)) =
|
||||
let tc = env.tezos_context in
|
||||
let node_lwt = Script_ir_translator.unparse_data tc ~mapper Readable ty data in
|
||||
let node = fst @@ Error_monad.force_lwt_alpha ~msg:"data to string" node_lwt in
|
||||
node
|
||||
|
||||
let data_to_string ty data =
|
||||
let node = data_to_node (Ex_typed_value (ty, data)) in
|
||||
node_to_string node
|
||||
|
||||
open Script_typed_ir
|
||||
open Script_interpreter
|
||||
type ex_typed_stack =
|
||||
Ex_typed_stack : ('a stack_ty * 'a stack) -> ex_typed_stack
|
||||
|
||||
let stack_to_string stack_ty stack =
|
||||
let rec aux acc fst (Ex_typed_stack(stack_ty,stack)) =
|
||||
match (stack_ty, stack) with
|
||||
| Item_t (hd_ty, tl_ty, _), Item (hd, tl) -> (
|
||||
let separator = if not fst then " ; " else "" in
|
||||
let str = data_to_string hd_ty hd in
|
||||
let acc = acc ^ separator ^ str in
|
||||
let new_value = aux acc false (Ex_typed_stack (tl_ty, tl)) in
|
||||
new_value
|
||||
)
|
||||
| _ -> acc in
|
||||
aux "" true @@ Ex_typed_stack(stack_ty, stack)
|
||||
|
||||
let ty_to_node ty =
|
||||
let (node, _) = Error_monad.force_lwt_alpha ~msg:"ty to node" @@ Script_ir_translator.unparse_ty env.tezos_context ty in
|
||||
node
|
||||
|
||||
type ex_descr =
|
||||
Ex_descr : (_, _) Script_typed_ir.descr -> ex_descr
|
||||
|
||||
let descr_to_node x =
|
||||
let open Alpha_context.Script in
|
||||
let open Micheline in
|
||||
let open! Script_typed_ir in
|
||||
let rec f : ex_descr -> Script.node = fun descr ->
|
||||
let prim ?children ?children_nodes p =
|
||||
match (children, children_nodes) with
|
||||
| Some children, None ->
|
||||
Prim (0, p, List.map f children, [])
|
||||
| Some _, Some _ ->
|
||||
raise @@ Failure "descr_to_node: too many parameters"
|
||||
| None, Some children_nodes ->
|
||||
Prim (0, p, children_nodes, [])
|
||||
| None, None ->
|
||||
Prim (0, p, [], [])
|
||||
in
|
||||
let (Ex_descr descr) = descr in
|
||||
match descr.instr with
|
||||
| Dup -> prim I_DUP
|
||||
| Drop -> prim I_DROP
|
||||
| Swap -> prim I_SWAP
|
||||
| Dip c -> prim ~children:[Ex_descr c] I_DIP
|
||||
| Car -> prim I_CAR
|
||||
| Cdr -> prim I_CDR
|
||||
| Cons_pair -> prim I_PAIR
|
||||
| Nop -> prim I_NOP
|
||||
| Seq (a, b) -> Micheline.Seq (0, List.map f [Ex_descr a ; Ex_descr b])
|
||||
| Const v -> (
|
||||
let (Item_t (ty, _, _)) = descr.aft in
|
||||
prim ~children_nodes:[data_to_node (Ex_typed_value (ty, v))] I_PUSH
|
||||
)
|
||||
| Failwith _ -> prim I_FAILWITH
|
||||
| If (a, b) -> prim ~children:[Ex_descr a ; Ex_descr b] I_IF
|
||||
| Loop c -> prim ~children:[Ex_descr c] I_LOOP
|
||||
| If_left (a, b) -> prim ~children:[Ex_descr a ; Ex_descr b] I_IF_LEFT
|
||||
| Left -> prim I_LEFT
|
||||
| Right -> prim I_RIGHT
|
||||
| Loop_left c -> prim ~children:[Ex_descr c] I_LOOP_LEFT
|
||||
| If_none (a, b) -> prim ~children:[Ex_descr a ; Ex_descr b] I_IF_NONE
|
||||
| Cons_none _ -> prim I_NONE
|
||||
| Cons_some -> prim I_SOME
|
||||
| Nil -> prim I_NIL
|
||||
| Cons_list -> prim I_CONS
|
||||
| If_cons (a, b) -> prim ~children:[Ex_descr a ; Ex_descr b] I_IF_CONS
|
||||
| List_iter _ -> prim I_ITER
|
||||
| Compare _ -> prim I_COMPARE
|
||||
| Int_nat -> prim I_INT
|
||||
| Add_natnat -> prim I_ADD
|
||||
| Add_natint -> prim I_ADD
|
||||
| Add_intnat -> prim I_ADD
|
||||
| Sub_int -> prim I_SUB
|
||||
| Mul_natnat -> prim I_MUL
|
||||
| Ediv_natnat -> prim I_MUL
|
||||
| Map_get -> prim I_GET
|
||||
| Map_update -> prim I_UPDATE
|
||||
| Big_map_get -> prim I_GET
|
||||
| Big_map_update -> prim I_UPDATE
|
||||
| Gt -> prim I_GT
|
||||
| Ge -> prim I_GE
|
||||
| Pack _ -> prim I_PACK
|
||||
| Unpack _ -> prim I_UNPACK
|
||||
| Blake2b -> prim I_BLAKE2B
|
||||
| And -> prim I_AND
|
||||
| Xor -> prim I_XOR
|
||||
| _ -> raise @@ Failure "descr to node" in
|
||||
f @@ Ex_descr x
|
||||
|
||||
let rec flatten_node =
|
||||
let open! Micheline in
|
||||
function
|
||||
| Micheline.Seq (a, lst) -> (
|
||||
let aux = function
|
||||
| Prim (loc, p, children, annot) -> [ Prim (loc, p, List.map flatten_node children, annot) ]
|
||||
| Seq (_, lst) -> List.map flatten_node lst
|
||||
| x -> [ x ] in
|
||||
let seqs = List.map aux @@ List.map flatten_node lst in
|
||||
Seq (a, List.concat seqs) )
|
||||
| x -> x
|
||||
|
||||
let descr_to_string descr =
|
||||
let node = descr_to_node descr in
|
||||
let node = flatten_node node in
|
||||
node_to_string node
|
||||
|
||||
let n_of_int n =
|
||||
match Script_int.is_nat @@ Script_int.of_int n with
|
||||
| None -> raise @@ Failure "n_of_int"
|
||||
| Some n -> n
|
12
vendors/ligo-utils/proto-alpha-utils/dune
vendored
Normal file
12
vendors/ligo-utils/proto-alpha-utils/dune
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
(library
|
||||
(name proto_alpha_utils)
|
||||
(public_name proto-alpha-utils)
|
||||
(libraries
|
||||
tezos-error-monad
|
||||
tezos-stdlib-unix
|
||||
tezos-memory-proto-alpha
|
||||
simple-utils
|
||||
tezos-utils
|
||||
)
|
||||
(flags (:standard -open Simple_utils ))
|
||||
)
|
1
vendors/ligo-utils/proto-alpha-utils/dune-project
vendored
Normal file
1
vendors/ligo-utils/proto-alpha-utils/dune-project
vendored
Normal file
@ -0,0 +1 @@
|
||||
(lang dune 1.6)
|
292
vendors/ligo-utils/proto-alpha-utils/init_proto_alpha.ml
vendored
Normal file
292
vendors/ligo-utils/proto-alpha-utils/init_proto_alpha.ml
vendored
Normal file
@ -0,0 +1,292 @@
|
||||
open! Memory_proto_alpha
|
||||
module Signature = Tezos_base.TzPervasives.Signature
|
||||
module Data_encoding = Alpha_environment.Data_encoding
|
||||
module MBytes = Alpha_environment.MBytes
|
||||
module Error_monad = X_error_monad
|
||||
open Error_monad
|
||||
|
||||
|
||||
|
||||
module Context_init = struct
|
||||
|
||||
type account = {
|
||||
pkh : Signature.Public_key_hash.t ;
|
||||
pk : Signature.Public_key.t ;
|
||||
sk : Signature.Secret_key.t ;
|
||||
}
|
||||
|
||||
let generate_accounts n : (account * Tez_repr.t) list =
|
||||
let amount = Tez_repr.of_mutez_exn 4_000_000_000_000L in
|
||||
List.map (fun _ ->
|
||||
let (pkh, pk, sk) = Signature.generate_key () in
|
||||
let account = { pkh ; pk ; sk } in
|
||||
account, amount)
|
||||
(List.range n)
|
||||
|
||||
let make_shell
|
||||
~level ~predecessor ~timestamp ~fitness ~operations_hash =
|
||||
Tezos_base.Block_header.{
|
||||
level ;
|
||||
predecessor ;
|
||||
timestamp ;
|
||||
fitness ;
|
||||
operations_hash ;
|
||||
(* We don't care of the following values, only the shell validates them. *)
|
||||
proto_level = 0 ;
|
||||
validation_passes = 0 ;
|
||||
context = Alpha_environment.Context_hash.zero ;
|
||||
}
|
||||
|
||||
let default_proof_of_work_nonce =
|
||||
MBytes.create Alpha_context.Constants.proof_of_work_nonce_size
|
||||
|
||||
let protocol_param_key = [ "protocol_parameters" ]
|
||||
|
||||
let check_constants_consistency constants =
|
||||
let open Constants_repr in
|
||||
let open Error_monad in
|
||||
let { blocks_per_cycle ; blocks_per_commitment ;
|
||||
blocks_per_roll_snapshot ; _ } = constants in
|
||||
Error_monad.unless (blocks_per_commitment <= blocks_per_cycle)
|
||||
(fun () -> failwith "Inconsistent constants : blocks per commitment must be \
|
||||
less than blocks per cycle") >>=? fun () ->
|
||||
Error_monad.unless (blocks_per_cycle >= blocks_per_roll_snapshot)
|
||||
(fun () -> failwith "Inconsistent constants : blocks per cycle \
|
||||
must be superior than blocks per roll snapshot") >>=?
|
||||
return
|
||||
|
||||
|
||||
let initial_context
|
||||
constants
|
||||
header
|
||||
commitments
|
||||
initial_accounts
|
||||
security_deposit_ramp_up_cycles
|
||||
no_reward_cycles
|
||||
=
|
||||
let open Tezos_base.TzPervasives.Error_monad in
|
||||
let bootstrap_accounts =
|
||||
List.map (fun ({ pk ; pkh ; _ }, amount) ->
|
||||
Parameters_repr.{ public_key_hash = pkh ; public_key = Some pk ; amount }
|
||||
) initial_accounts
|
||||
in
|
||||
let json =
|
||||
Data_encoding.Json.construct
|
||||
Parameters_repr.encoding
|
||||
Parameters_repr.{
|
||||
bootstrap_accounts ;
|
||||
bootstrap_contracts = [] ;
|
||||
commitments ;
|
||||
constants ;
|
||||
security_deposit_ramp_up_cycles ;
|
||||
no_reward_cycles ;
|
||||
}
|
||||
in
|
||||
let proto_params =
|
||||
Data_encoding.Binary.to_bytes_exn Data_encoding.json json
|
||||
in
|
||||
Tezos_protocol_environment_memory.Context.(
|
||||
set empty ["version"] (MBytes.of_string "genesis")
|
||||
) >>= fun ctxt ->
|
||||
Tezos_protocol_environment_memory.Context.(
|
||||
set ctxt protocol_param_key proto_params
|
||||
) >>= fun ctxt ->
|
||||
Main.init ctxt header
|
||||
>|= Alpha_environment.wrap_error >>=? fun { context; _ } ->
|
||||
return context
|
||||
|
||||
let genesis
|
||||
?(preserved_cycles = Constants_repr.default.preserved_cycles)
|
||||
?(blocks_per_cycle = Constants_repr.default.blocks_per_cycle)
|
||||
?(blocks_per_commitment = Constants_repr.default.blocks_per_commitment)
|
||||
?(blocks_per_roll_snapshot = Constants_repr.default.blocks_per_roll_snapshot)
|
||||
?(blocks_per_voting_period = Constants_repr.default.blocks_per_voting_period)
|
||||
?(time_between_blocks = Constants_repr.default.time_between_blocks)
|
||||
?(endorsers_per_block = Constants_repr.default.endorsers_per_block)
|
||||
?(hard_gas_limit_per_operation = Constants_repr.default.hard_gas_limit_per_operation)
|
||||
?(hard_gas_limit_per_block = Constants_repr.default.hard_gas_limit_per_block)
|
||||
?(proof_of_work_threshold = Int64.(neg one))
|
||||
?(tokens_per_roll = Constants_repr.default.tokens_per_roll)
|
||||
?(michelson_maximum_type_size = Constants_repr.default.michelson_maximum_type_size)
|
||||
?(seed_nonce_revelation_tip = Constants_repr.default.seed_nonce_revelation_tip)
|
||||
?(origination_size = Constants_repr.default.origination_size)
|
||||
?(block_security_deposit = Constants_repr.default.block_security_deposit)
|
||||
?(endorsement_security_deposit = Constants_repr.default.endorsement_security_deposit)
|
||||
?(block_reward = Constants_repr.default.block_reward)
|
||||
?(endorsement_reward = Constants_repr.default.endorsement_reward)
|
||||
?(cost_per_byte = Constants_repr.default.cost_per_byte)
|
||||
?(hard_storage_limit_per_operation = Constants_repr.default.hard_storage_limit_per_operation)
|
||||
?(commitments = [])
|
||||
?(security_deposit_ramp_up_cycles = None)
|
||||
?(no_reward_cycles = None)
|
||||
(initial_accounts : (account * Tez_repr.t) list)
|
||||
=
|
||||
if initial_accounts = [] then
|
||||
Pervasives.failwith "Must have one account with a roll to bake";
|
||||
|
||||
(* Check there is at least one roll *)
|
||||
let open Tezos_base.TzPervasives.Error_monad in
|
||||
begin try
|
||||
let (>>?=) x y = match x with
|
||||
| Ok(a) -> y a
|
||||
| Error(b) -> fail @@ List.hd b in
|
||||
fold_left_s (fun acc (_, amount) ->
|
||||
Alpha_environment.wrap_error @@
|
||||
Tez_repr.(+?) acc amount >>?= fun acc ->
|
||||
if acc >= tokens_per_roll then
|
||||
raise Exit
|
||||
else return acc
|
||||
) Tez_repr.zero initial_accounts >>=? fun _ ->
|
||||
failwith "Insufficient tokens in initial accounts to create one roll"
|
||||
with Exit -> return ()
|
||||
end >>=? fun () ->
|
||||
|
||||
let constants : Constants_repr.parametric = {
|
||||
preserved_cycles ;
|
||||
blocks_per_cycle ;
|
||||
blocks_per_commitment ;
|
||||
blocks_per_roll_snapshot ;
|
||||
blocks_per_voting_period ;
|
||||
time_between_blocks ;
|
||||
endorsers_per_block ;
|
||||
hard_gas_limit_per_operation ;
|
||||
hard_gas_limit_per_block ;
|
||||
proof_of_work_threshold ;
|
||||
tokens_per_roll ;
|
||||
michelson_maximum_type_size ;
|
||||
seed_nonce_revelation_tip ;
|
||||
origination_size ;
|
||||
block_security_deposit ;
|
||||
endorsement_security_deposit ;
|
||||
block_reward ;
|
||||
endorsement_reward ;
|
||||
cost_per_byte ;
|
||||
hard_storage_limit_per_operation ;
|
||||
} in
|
||||
check_constants_consistency constants >>=? fun () ->
|
||||
|
||||
let hash =
|
||||
Alpha_environment.Block_hash.of_b58check_exn "BLockGenesisGenesisGenesisGenesisGenesisCCCCCeZiLHU"
|
||||
in
|
||||
let shell = make_shell
|
||||
~level:0l
|
||||
~predecessor:hash
|
||||
~timestamp:Tezos_base.TzPervasives.Time.epoch
|
||||
~fitness: (Fitness_repr.from_int64 0L)
|
||||
~operations_hash: Alpha_environment.Operation_list_list_hash.zero in
|
||||
initial_context
|
||||
constants
|
||||
shell
|
||||
commitments
|
||||
initial_accounts
|
||||
security_deposit_ramp_up_cycles
|
||||
no_reward_cycles
|
||||
>>=? fun context ->
|
||||
return (context, shell, hash)
|
||||
|
||||
let init
|
||||
?(slow=false)
|
||||
?preserved_cycles
|
||||
?endorsers_per_block
|
||||
?commitments
|
||||
n =
|
||||
let open Error_monad in
|
||||
let accounts = generate_accounts n in
|
||||
let contracts = List.map (fun (a, _) ->
|
||||
Alpha_context.Contract.implicit_contract (a.pkh)) accounts in
|
||||
begin
|
||||
if slow then
|
||||
genesis
|
||||
?preserved_cycles
|
||||
?endorsers_per_block
|
||||
?commitments
|
||||
accounts
|
||||
else
|
||||
genesis
|
||||
?preserved_cycles
|
||||
~blocks_per_cycle:32l
|
||||
~blocks_per_commitment:4l
|
||||
~blocks_per_roll_snapshot:8l
|
||||
~blocks_per_voting_period:(Int32.mul 32l 8l)
|
||||
?endorsers_per_block
|
||||
?commitments
|
||||
accounts
|
||||
end >>=? fun ctxt ->
|
||||
return (ctxt, accounts, contracts)
|
||||
|
||||
let contents
|
||||
?(proof_of_work_nonce = default_proof_of_work_nonce)
|
||||
?(priority = 0) ?seed_nonce_hash () =
|
||||
Alpha_context.Block_header.({
|
||||
priority ;
|
||||
proof_of_work_nonce ;
|
||||
seed_nonce_hash ;
|
||||
})
|
||||
|
||||
|
||||
let begin_construction ?(priority=0) ~timestamp ~(header:Alpha_context.Block_header.shell_header) ~hash ctxt =
|
||||
let contents = contents ~priority () in
|
||||
let protocol_data =
|
||||
let open! Alpha_context.Block_header in {
|
||||
contents ;
|
||||
signature = Signature.zero ;
|
||||
} in
|
||||
let timestamp = Alpha_environment.Time.add timestamp @@ Int64.of_int 180 in
|
||||
Main.begin_construction
|
||||
~chain_id: Alpha_environment.Chain_id.zero
|
||||
~predecessor_context: ctxt
|
||||
~predecessor_timestamp: header.timestamp
|
||||
~predecessor_fitness: header.fitness
|
||||
~predecessor_level: header.level
|
||||
~predecessor:hash
|
||||
~timestamp
|
||||
~protocol_data
|
||||
() >>= fun x -> Lwt.return @@ Alpha_environment.wrap_error x >>=? fun state ->
|
||||
return state.ctxt
|
||||
|
||||
let main n =
|
||||
init n >>=? fun ((ctxt, header, hash), accounts, contracts) ->
|
||||
let timestamp = Tezos_base.Time.now () in
|
||||
begin_construction ~timestamp ~header ~hash ctxt >>=? fun ctxt ->
|
||||
return (ctxt, accounts, contracts)
|
||||
|
||||
end
|
||||
|
||||
type identity = {
|
||||
public_key_hash : Signature.public_key_hash;
|
||||
public_key : Signature.public_key;
|
||||
secret_key : Signature.secret_key;
|
||||
implicit_contract : Alpha_context.Contract.t;
|
||||
}
|
||||
|
||||
type environment = {
|
||||
tezos_context : Alpha_context.t ;
|
||||
identities : identity list ;
|
||||
}
|
||||
|
||||
let init_environment () =
|
||||
Context_init.main 10 >>=? fun (tezos_context, accounts, contracts) ->
|
||||
let accounts = List.map fst accounts in
|
||||
let tezos_context = Alpha_context.Gas.set_limit tezos_context @@ Z.of_int 350000 in
|
||||
let identities =
|
||||
List.map (fun ((a:Context_init.account), c) -> {
|
||||
public_key = a.pk ;
|
||||
public_key_hash = a.pkh ;
|
||||
secret_key = a.sk ;
|
||||
implicit_contract = c ;
|
||||
}) @@
|
||||
List.combine accounts contracts in
|
||||
return {tezos_context ; identities}
|
||||
|
||||
let contextualize ~msg ?environment f =
|
||||
let lwt =
|
||||
let environment = match environment with
|
||||
| None -> init_environment ()
|
||||
| Some x -> return x in
|
||||
environment >>=? f
|
||||
in
|
||||
force_ok ~msg @@ Lwt_main.run lwt
|
||||
|
||||
let dummy_environment =
|
||||
X_error_monad.force_lwt ~msg:"Init_proto_alpha : initing dummy environment" @@
|
||||
init_environment ()
|
52
vendors/ligo-utils/proto-alpha-utils/proto-alpha-utils.opam
vendored
Normal file
52
vendors/ligo-utils/proto-alpha-utils/proto-alpha-utils.opam
vendored
Normal file
@ -0,0 +1,52 @@
|
||||
opam-version: "2.0"
|
||||
name: "proto-alpha-utils"
|
||||
version: "dev"
|
||||
synopsis: "LIGO Proto Alpha-specific Utilities, to be used by other libraries"
|
||||
maintainer: "Galfour <ligolang@gmail.com>"
|
||||
authors: "Galfour <ligolang@gmail.com>"
|
||||
license: "MIT"
|
||||
homepage: "https://gitlab.com/ligolang/ligo-utils"
|
||||
bug-reports: "https://gitlab.com/ligolang/ligo-utils/issues"
|
||||
depends: [
|
||||
"dune"
|
||||
"base"
|
||||
"base"
|
||||
"bigstring"
|
||||
"calendar"
|
||||
"cohttp-lwt-unix"
|
||||
"cstruct"
|
||||
"ezjsonm"
|
||||
"hex"
|
||||
"hidapi"
|
||||
"ipaddr"
|
||||
"irmin"
|
||||
"js_of_ocaml"
|
||||
"lwt"
|
||||
"lwt_log"
|
||||
"mtime"
|
||||
"ocplib-endian"
|
||||
"ocp-ocamlres"
|
||||
"re"
|
||||
"rresult"
|
||||
"stdio"
|
||||
"uri"
|
||||
"uutf"
|
||||
"zarith"
|
||||
"ocplib-json-typed"
|
||||
"ocplib-json-typed-bson"
|
||||
"tezos-crypto"
|
||||
"tezos-stdlib-unix"
|
||||
"tezos-data-encoding"
|
||||
"tezos-protocol-environment"
|
||||
"tezos-protocol-alpha"
|
||||
"michelson-parser"
|
||||
"simple-utils"
|
||||
"tezos-utils"
|
||||
# from ppx_let:
|
||||
"ocaml" {>= "4.04.2" & < "4.08.0"}
|
||||
"dune" {build & >= "1.5.1"}
|
||||
"ppxlib" {>= "0.5.0"}
|
||||
]
|
||||
build: [
|
||||
["dune" "build" "-p" name]
|
||||
]
|
9
vendors/ligo-utils/proto-alpha-utils/proto_alpha_utils.ml
vendored
Normal file
9
vendors/ligo-utils/proto-alpha-utils/proto_alpha_utils.ml
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
module Stdlib_unix = Tezos_stdlib_unix
|
||||
module Data_encoding = Tezos_data_encoding
|
||||
module Crypto = Tezos_crypto
|
||||
module Signature = Tezos_base.TzPervasives.Signature
|
||||
module Time = Tezos_base.TzPervasives.Time
|
||||
module Memory_proto_alpha = X_memory_proto_alpha
|
||||
module Error_monad = X_error_monad
|
||||
module Cast = Cast
|
||||
module Trace = Trace
|
44
vendors/ligo-utils/proto-alpha-utils/trace.ml
vendored
Normal file
44
vendors/ligo-utils/proto-alpha-utils/trace.ml
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
include Simple_utils.Trace
|
||||
|
||||
module AE = Memory_proto_alpha.Alpha_environment
|
||||
module TP = Tezos_base__TzPervasives
|
||||
|
||||
let of_tz_error (err:X_error_monad.error) : error_thunk =
|
||||
let str () = X_error_monad.(to_string err) in
|
||||
error (thunk "alpha error") str
|
||||
|
||||
let of_alpha_tz_error err = of_tz_error (AE.Ecoproto_error err)
|
||||
|
||||
let trace_alpha_tzresult err : 'a AE.Error_monad.tzresult -> 'a result =
|
||||
function
|
||||
| Result.Ok x -> ok x
|
||||
| Error errs -> Errors (err :: List.map of_alpha_tz_error errs)
|
||||
|
||||
let trace_alpha_tzresult_lwt error (x:_ AE.Error_monad.tzresult Lwt.t) : _ result =
|
||||
trace_alpha_tzresult error @@ Lwt_main.run x
|
||||
|
||||
let trace_tzresult err =
|
||||
function
|
||||
| Result.Ok x -> ok x
|
||||
| Error errs -> Errors (err :: List.map of_tz_error errs)
|
||||
|
||||
(* TODO: should be a combination of trace_tzresult and trace_r *)
|
||||
let trace_tzresult_r err_thunk_may_fail =
|
||||
function
|
||||
| Result.Ok x -> ok x
|
||||
| Error errs ->
|
||||
let tz_errs = List.map of_tz_error errs in
|
||||
match err_thunk_may_fail () with
|
||||
| Simple_utils.Trace.Ok (err, annotations) -> ignore annotations; Errors (err :: tz_errs)
|
||||
| Errors errors_while_generating_error ->
|
||||
(* TODO: the complexity could be O(n*n) in the worst case,
|
||||
this should use some catenable lists. *)
|
||||
Errors (errors_while_generating_error
|
||||
@ tz_errs)
|
||||
|
||||
let trace_tzresult_lwt err (x:_ TP.Error_monad.tzresult Lwt.t) : _ result =
|
||||
trace_tzresult err @@ Lwt_main.run x
|
||||
|
||||
let trace_tzresult_lwt_r err (x:_ TP.Error_monad.tzresult Lwt.t) : _ result =
|
||||
trace_tzresult_r err @@ Lwt_main.run x
|
||||
|
25
vendors/ligo-utils/proto-alpha-utils/x_error_monad.ml
vendored
Normal file
25
vendors/ligo-utils/proto-alpha-utils/x_error_monad.ml
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
include Tezos_error_monad.Error_monad
|
||||
include Tezos_utils.Error_monad
|
||||
open Memory_proto_alpha
|
||||
|
||||
let (>>??) = Alpha_environment.Error_monad.(>>?)
|
||||
|
||||
let alpha_wrap a = Alpha_environment.wrap_error a
|
||||
|
||||
let force_ok_alpha ~msg a = force_ok ~msg @@ alpha_wrap a
|
||||
|
||||
let force_lwt ~msg a = force_ok ~msg @@ Lwt_main.run a
|
||||
|
||||
let force_lwt_alpha ~msg a = force_ok ~msg @@ alpha_wrap @@ Lwt_main.run a
|
||||
|
||||
let assert_error () = function
|
||||
| Ok _ -> fail @@ failure "assert_error"
|
||||
| Error _ -> return ()
|
||||
|
||||
let (>>=??) a f =
|
||||
a >>= fun a ->
|
||||
match alpha_wrap a with
|
||||
| Ok result -> f result
|
||||
| Error errs -> Lwt.return (Error errs)
|
||||
|
||||
|
133
vendors/ligo-utils/proto-alpha-utils/x_memory_proto_alpha.ml
vendored
Normal file
133
vendors/ligo-utils/proto-alpha-utils/x_memory_proto_alpha.ml
vendored
Normal file
@ -0,0 +1,133 @@
|
||||
module Michelson = Tezos_utils.Michelson
|
||||
|
||||
include Memory_proto_alpha
|
||||
let init_environment = Init_proto_alpha.init_environment
|
||||
let dummy_environment = Init_proto_alpha.dummy_environment
|
||||
|
||||
open X_error_monad
|
||||
open Script_typed_ir
|
||||
open Script_ir_translator
|
||||
open Script_interpreter
|
||||
|
||||
let stack_ty_eq (type a b)
|
||||
?(tezos_context = dummy_environment.tezos_context)
|
||||
(a:a stack_ty) (b:b stack_ty) =
|
||||
alpha_wrap (Script_ir_translator.stack_ty_eq tezos_context 0 a b) >>? fun (Eq, _) ->
|
||||
ok Eq
|
||||
|
||||
let ty_eq (type a b)
|
||||
?(tezos_context = dummy_environment.tezos_context)
|
||||
(a:a ty) (b:b ty)
|
||||
=
|
||||
alpha_wrap (Script_ir_translator.ty_eq tezos_context a b) >>? fun (Eq, _) ->
|
||||
ok Eq
|
||||
|
||||
let parse_michelson (type aft)
|
||||
?(tezos_context = dummy_environment.tezos_context)
|
||||
?(top_level = Lambda) (michelson:Michelson.t)
|
||||
?type_logger
|
||||
(bef:'a Script_typed_ir.stack_ty) (aft:aft Script_typed_ir.stack_ty)
|
||||
=
|
||||
let michelson = Michelson.strip_annots michelson in
|
||||
let michelson = Michelson.strip_nops michelson in
|
||||
parse_instr
|
||||
?type_logger
|
||||
top_level tezos_context
|
||||
michelson bef >>=?? fun (j, _) ->
|
||||
match j with
|
||||
| Typed descr -> (
|
||||
Lwt.return (
|
||||
alpha_wrap (Script_ir_translator.stack_ty_eq tezos_context 0 descr.aft aft) >>? fun (Eq, _) ->
|
||||
let descr : (_, aft) Script_typed_ir.descr = {descr with aft} in
|
||||
Ok descr
|
||||
)
|
||||
)
|
||||
| _ -> Lwt.return @@ error_exn (Failure "Typing instr failed")
|
||||
|
||||
let parse_michelson_fail (type aft)
|
||||
?(tezos_context = dummy_environment.tezos_context)
|
||||
?(top_level = Lambda) (michelson:Michelson.t)
|
||||
?type_logger
|
||||
(bef:'a Script_typed_ir.stack_ty) (aft:aft Script_typed_ir.stack_ty)
|
||||
=
|
||||
let michelson = Michelson.strip_annots michelson in
|
||||
let michelson = Michelson.strip_nops michelson in
|
||||
parse_instr
|
||||
?type_logger
|
||||
top_level tezos_context
|
||||
michelson bef >>=?? fun (j, _) ->
|
||||
match j with
|
||||
| Typed descr -> (
|
||||
Lwt.return (
|
||||
alpha_wrap (Script_ir_translator.stack_ty_eq tezos_context 0 descr.aft aft) >>? fun (Eq, _) ->
|
||||
let descr : (_, aft) Script_typed_ir.descr = {descr with aft} in
|
||||
Ok descr
|
||||
)
|
||||
)
|
||||
| Failed { descr } ->
|
||||
Lwt.return (Ok (descr aft))
|
||||
|
||||
let parse_michelson_data
|
||||
?(tezos_context = dummy_environment.tezos_context)
|
||||
michelson ty =
|
||||
let michelson = Michelson.strip_annots michelson in
|
||||
let michelson = Michelson.strip_nops michelson in
|
||||
parse_data tezos_context ty michelson >>=?? fun (data, _) ->
|
||||
return data
|
||||
|
||||
let parse_michelson_ty
|
||||
?(tezos_context = dummy_environment.tezos_context)
|
||||
?(allow_big_map = true) ?(allow_operation = true)
|
||||
michelson =
|
||||
let michelson = Michelson.strip_annots michelson in
|
||||
let michelson = Michelson.strip_nops michelson in
|
||||
Lwt.return @@ parse_ty tezos_context ~allow_big_map ~allow_operation michelson >>=?? fun (ty, _) ->
|
||||
return ty
|
||||
|
||||
let unparse_michelson_data
|
||||
?(tezos_context = dummy_environment.tezos_context)
|
||||
?mapper ty value : Michelson.t tzresult Lwt.t =
|
||||
Script_ir_translator.unparse_data tezos_context ?mapper
|
||||
Readable ty value >>=?? fun (michelson, _) ->
|
||||
return michelson
|
||||
|
||||
let unparse_michelson_ty
|
||||
?(tezos_context = dummy_environment.tezos_context)
|
||||
ty : Michelson.t tzresult Lwt.t =
|
||||
Script_ir_translator.unparse_ty tezos_context ty >>=?? fun (michelson, _) ->
|
||||
return michelson
|
||||
|
||||
type options = {
|
||||
tezos_context: Alpha_context.t ;
|
||||
source: Alpha_context.Contract.t ;
|
||||
payer: Alpha_context.Contract.t ;
|
||||
self: Alpha_context.Contract.t ;
|
||||
amount: Alpha_context.Tez.t ;
|
||||
}
|
||||
|
||||
let make_options
|
||||
?(tezos_context = dummy_environment.tezos_context)
|
||||
?(source = (List.nth dummy_environment.identities 0).implicit_contract)
|
||||
?(self = (List.nth dummy_environment.identities 0).implicit_contract)
|
||||
?(payer = (List.nth dummy_environment.identities 1).implicit_contract)
|
||||
?(amount = Alpha_context.Tez.one) ()
|
||||
= {
|
||||
tezos_context ;
|
||||
source ;
|
||||
self ;
|
||||
payer ;
|
||||
amount ;
|
||||
}
|
||||
|
||||
let default_options = make_options ()
|
||||
|
||||
let interpret ?(options = default_options) ?visitor (instr:('a, 'b) descr) (bef:'a stack) : 'b stack tzresult Lwt.t =
|
||||
let {
|
||||
tezos_context ;
|
||||
source ;
|
||||
self ;
|
||||
payer ;
|
||||
amount ;
|
||||
} = options in
|
||||
Script_interpreter.step tezos_context ~source ~self ~payer ?visitor amount instr bef >>=??
|
||||
fun (stack, _) -> return stack
|
59
vendors/ligo-utils/simple-utils/PP_helpers.ml
vendored
Normal file
59
vendors/ligo-utils/simple-utils/PP_helpers.ml
vendored
Normal file
@ -0,0 +1,59 @@
|
||||
open Format
|
||||
let string : formatter -> string -> unit = fun ppf s -> fprintf ppf "%s" s
|
||||
let tag tag : formatter -> unit -> unit = fun ppf () -> fprintf ppf tag
|
||||
let bool ppf b = fprintf ppf "%b" b
|
||||
let pair f g ppf (a , b) = fprintf ppf "%a , %a" f a g b
|
||||
let new_line : formatter -> unit -> unit = tag "@;"
|
||||
let rec new_lines n ppf () =
|
||||
match n with
|
||||
| 0 -> new_line ppf ()
|
||||
| n -> new_line ppf () ; new_lines (n-1) ppf ()
|
||||
let const const : formatter -> unit -> unit = fun ppf () -> fprintf ppf "%s" const
|
||||
let comment : formatter -> string -> unit = fun ppf s -> fprintf ppf "(* %s *)" s
|
||||
let list_sep value separator = pp_print_list ~pp_sep:separator value
|
||||
let list value = pp_print_list ~pp_sep:(tag "") value
|
||||
let ne_list_sep value separator ppf (hd, tl) =
|
||||
value ppf hd ;
|
||||
separator ppf () ;
|
||||
pp_print_list ~pp_sep:separator value ppf tl
|
||||
|
||||
let prepend s f ppf a =
|
||||
fprintf ppf "%s%a" s f a
|
||||
|
||||
let option = fun f ppf opt ->
|
||||
match opt with
|
||||
| Some x -> fprintf ppf "Some(%a)" f x
|
||||
| None -> fprintf ppf "None"
|
||||
|
||||
let lr = fun ppf lr ->
|
||||
match lr with
|
||||
| `Left -> fprintf ppf "left"
|
||||
| `Right -> fprintf ppf "right"
|
||||
|
||||
let int = fun ppf n -> fprintf ppf "%d" n
|
||||
|
||||
let map = fun f pp ppf x ->
|
||||
pp ppf (f x)
|
||||
|
||||
let pair_sep value sep ppf (a, b) = fprintf ppf "%a %s %a" value a sep value b
|
||||
let smap_sep value sep ppf m =
|
||||
let module SMap = X_map.String in
|
||||
let lst = SMap.to_kv_list m in
|
||||
let new_pp ppf (k, v) = fprintf ppf "%s -> %a" k value v in
|
||||
fprintf ppf "%a" (list_sep new_pp sep) lst
|
||||
|
||||
(* TODO: remove all uses. this is bad. *)
|
||||
let printer : ('a -> unit) -> _ -> 'a -> unit = fun f ppf x ->
|
||||
let oldstdout = Unix.dup Unix.stdout in
|
||||
let name = "/tmp/wtf-" ^ (string_of_int @@ Random.bits ()) in
|
||||
let newstdout = open_out name in
|
||||
Unix.dup2 (Unix.descr_of_out_channel newstdout) Unix.stdout;
|
||||
f x;
|
||||
flush stdout;
|
||||
Unix.dup2 oldstdout Unix.stdout;
|
||||
let ic = open_in name in
|
||||
let n = in_channel_length ic in
|
||||
let s = Bytes.create n in
|
||||
really_input ic s 0 n;
|
||||
close_in ic;
|
||||
fprintf ppf "%s" (Bytes.to_string s)
|
53
vendors/ligo-utils/simple-utils/dictionary.ml
vendored
Normal file
53
vendors/ligo-utils/simple-utils/dictionary.ml
vendored
Normal file
@ -0,0 +1,53 @@
|
||||
open Trace
|
||||
|
||||
module type DICTIONARY = sig
|
||||
type ('a, 'b) t
|
||||
|
||||
val get_exn : ('a, 'b) t -> 'a -> 'b
|
||||
val get : ('a, 'b) t -> 'a -> 'b result
|
||||
|
||||
val set :
|
||||
?equal:('a -> 'a -> bool) ->
|
||||
('a, 'b) t -> 'a -> 'b -> ('a, 'b) t
|
||||
|
||||
val del :
|
||||
?equal:('a -> 'a -> bool) ->
|
||||
('a, 'b) t -> 'a -> ('a, 'b) t
|
||||
|
||||
val to_list : ('a, 'b) t -> ('a * 'b) list
|
||||
end
|
||||
|
||||
module Assoc : DICTIONARY = struct
|
||||
|
||||
type ('a, 'b) t = ('a * 'b) list
|
||||
|
||||
let get_exn x y = List.assoc y x
|
||||
|
||||
let get x y = generic_try (simple_error "Dictionry.get") @@ fun () -> get_exn x y
|
||||
|
||||
let set ?equal lst a b =
|
||||
let equal : 'a -> 'a -> bool =
|
||||
X_option.unopt
|
||||
~default:(=) equal
|
||||
in
|
||||
let rec aux acc = function
|
||||
| [] -> List.rev acc
|
||||
| (key, _)::tl when equal key a -> aux ((key, b) :: acc) tl
|
||||
| hd::tl -> aux (hd :: acc) tl
|
||||
in
|
||||
aux [] lst
|
||||
|
||||
let del ?equal lst a =
|
||||
let equal : 'a -> 'a -> bool =
|
||||
X_option.unopt
|
||||
~default:(=) equal
|
||||
in
|
||||
let rec aux acc = function
|
||||
| [] -> List.rev acc
|
||||
| (key, _)::tl when equal key a -> aux acc tl
|
||||
| hd::tl -> aux (hd :: acc) tl
|
||||
in
|
||||
aux [] lst
|
||||
|
||||
let to_list x = x
|
||||
end
|
9
vendors/ligo-utils/simple-utils/dune
vendored
Normal file
9
vendors/ligo-utils/simple-utils/dune
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
(library
|
||||
(name simple_utils)
|
||||
(public_name simple-utils)
|
||||
(libraries
|
||||
yojson
|
||||
unix
|
||||
str
|
||||
)
|
||||
)
|
1
vendors/ligo-utils/simple-utils/dune-project
vendored
Normal file
1
vendors/ligo-utils/simple-utils/dune-project
vendored
Normal file
@ -0,0 +1 @@
|
||||
(lang dune 1.6)
|
8
vendors/ligo-utils/simple-utils/function.ml
vendored
Normal file
8
vendors/ligo-utils/simple-utils/function.ml
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
let constant x _ = x
|
||||
|
||||
let compose = fun f g x -> f (g x)
|
||||
let (>|) = compose
|
||||
|
||||
let compose_2 = fun f g x y -> f (g x y)
|
||||
let compose_3 = fun f g x y z -> f (g x y z)
|
||||
let compose_4 = fun f g a b c d -> f (g a b c d)
|
37
vendors/ligo-utils/simple-utils/location.ml
vendored
Normal file
37
vendors/ligo-utils/simple-utils/location.ml
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
(* type file_location = { *)
|
||||
(* filename : string ; *)
|
||||
(* start_line : int ; *)
|
||||
(* start_column : int ; *)
|
||||
(* end_line : int ; *)
|
||||
(* end_column : int ; *)
|
||||
(* } *)
|
||||
|
||||
type virtual_location = string
|
||||
|
||||
type t =
|
||||
| File of Region.t (* file_location *)
|
||||
| Virtual of virtual_location
|
||||
|
||||
let make (start_pos:Lexing.position) (end_pos:Lexing.position) : t =
|
||||
(* TODO: give correct unicode offsets (the random number is here so
|
||||
that searching for wrong souce locations appearing in messages
|
||||
will quickly lead here *)
|
||||
File (Region.make
|
||||
~start:(Pos.make ~byte:start_pos ~point_num:(-1897000) ~point_bol:(-1897000))
|
||||
~stop:(Pos.make ~byte:end_pos ~point_num:(-1897000) ~point_bol:(-1897000)))
|
||||
|
||||
let virtual_location s = Virtual s
|
||||
let dummy = virtual_location "dummy"
|
||||
|
||||
type 'a wrap = {
|
||||
wrap_content : 'a ;
|
||||
location : t ;
|
||||
}
|
||||
|
||||
let wrap ~loc wrap_content = { wrap_content ; location = loc }
|
||||
let unwrap { wrap_content ; _ } = wrap_content
|
||||
let map f x = { x with wrap_content = f x.wrap_content }
|
||||
let pp_wrap f ppf { wrap_content ; _ } = Format.fprintf ppf "%a" f wrap_content
|
||||
|
||||
let lift_region : 'a Region.reg -> 'a wrap = fun x ->
|
||||
wrap ~loc:(File x.region) x.value
|
11
vendors/ligo-utils/simple-utils/logger.ml
vendored
Normal file
11
vendors/ligo-utils/simple-utils/logger.ml
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
module Stateful () : sig
|
||||
val log : string -> unit
|
||||
val get : unit -> string
|
||||
end = struct
|
||||
|
||||
let logger = ref ""
|
||||
let log : string -> unit =
|
||||
fun s -> logger := !logger ^ s
|
||||
let get () : string = !logger
|
||||
|
||||
end
|
0
vendors/ligo-utils/simple-utils/ne_list.ml
vendored
Normal file
0
vendors/ligo-utils/simple-utils/ne_list.ml
vendored
Normal file
143
vendors/ligo-utils/simple-utils/pos.ml
vendored
Normal file
143
vendors/ligo-utils/simple-utils/pos.ml
vendored
Normal file
@ -0,0 +1,143 @@
|
||||
type t = <
|
||||
byte : Lexing.position;
|
||||
point_num : int;
|
||||
point_bol : int;
|
||||
file : string;
|
||||
line : int;
|
||||
|
||||
set_file : string -> t;
|
||||
set_line : int -> t;
|
||||
set_offset : int -> t;
|
||||
set : file:string -> line:int -> offset:int -> t;
|
||||
new_line : string -> t;
|
||||
add_nl : t;
|
||||
|
||||
shift_bytes : int -> t;
|
||||
shift_one_uchar : int -> t;
|
||||
|
||||
offset : [`Byte | `Point] -> int;
|
||||
column : [`Byte | `Point] -> int;
|
||||
|
||||
line_offset : [`Byte | `Point] -> int;
|
||||
byte_offset : int;
|
||||
|
||||
is_ghost : bool;
|
||||
|
||||
to_string : ?offsets:bool -> [`Byte | `Point] -> string;
|
||||
compact : ?offsets:bool -> [`Byte | `Point] -> string;
|
||||
anonymous : ?offsets:bool -> [`Byte | `Point] -> string
|
||||
>
|
||||
|
||||
type pos = t
|
||||
|
||||
(* Constructors *)
|
||||
|
||||
let sprintf = Printf.sprintf
|
||||
|
||||
let make ~byte ~point_num ~point_bol =
|
||||
let () = assert (point_num >= point_bol) in
|
||||
object (self)
|
||||
val byte = byte
|
||||
method byte = byte
|
||||
|
||||
val point_num = point_num
|
||||
method point_num = point_num
|
||||
|
||||
val point_bol = point_bol
|
||||
method point_bol = point_bol
|
||||
|
||||
method set_file file =
|
||||
{< byte = Lexing.{byte with pos_fname = file} >}
|
||||
|
||||
method set_line line =
|
||||
{< byte = Lexing.{byte with pos_lnum = line} >}
|
||||
|
||||
method set_offset offset =
|
||||
{< byte = Lexing.{byte with pos_cnum = byte.pos_bol + offset} >}
|
||||
|
||||
method set ~file ~line ~offset =
|
||||
let pos = self#set_file file in
|
||||
let pos = pos#set_line line in
|
||||
let pos = pos#set_offset offset
|
||||
in pos
|
||||
|
||||
(* The string must not contain '\n'. See [new_line]. *)
|
||||
|
||||
method shift_bytes len =
|
||||
{< byte = Lexing.{byte with pos_cnum = byte.pos_cnum + len};
|
||||
point_num = point_num + len >}
|
||||
|
||||
method shift_one_uchar len =
|
||||
{< byte = Lexing.{byte with pos_cnum = byte.pos_cnum + len};
|
||||
point_num = point_num + 1 >}
|
||||
|
||||
method add_nl =
|
||||
{< byte = Lexing.{byte with
|
||||
pos_lnum = byte.pos_lnum + 1;
|
||||
pos_bol = byte.pos_cnum};
|
||||
point_bol = point_num >}
|
||||
|
||||
method new_line string =
|
||||
let len = String.length string
|
||||
in (self#shift_bytes len)#add_nl
|
||||
|
||||
method is_ghost = byte = Lexing.dummy_pos
|
||||
|
||||
method file = byte.Lexing.pos_fname
|
||||
|
||||
method line = byte.Lexing.pos_lnum
|
||||
|
||||
method offset = function
|
||||
`Byte -> Lexing.(byte.pos_cnum - byte.pos_bol)
|
||||
| `Point -> point_num - point_bol
|
||||
|
||||
method column mode = 1 + self#offset mode
|
||||
|
||||
method line_offset = function
|
||||
`Byte -> byte.Lexing.pos_bol
|
||||
| `Point -> point_bol
|
||||
|
||||
method byte_offset = byte.Lexing.pos_cnum
|
||||
|
||||
method to_string ?(offsets=true) mode =
|
||||
let offset = self#offset mode in
|
||||
let horizontal, value =
|
||||
if offsets then "character", offset else "column", offset + 1
|
||||
in sprintf "File \"%s\", line %i, %s %i"
|
||||
self#file self#line horizontal value
|
||||
|
||||
method compact ?(offsets=true) mode =
|
||||
if self#is_ghost then "ghost"
|
||||
else
|
||||
let offset = self#offset mode in
|
||||
sprintf "%s:%i:%i"
|
||||
self#file self#line (if offsets then offset else offset + 1)
|
||||
|
||||
method anonymous ?(offsets=true) mode =
|
||||
if self#is_ghost then "ghost"
|
||||
else sprintf "%i:%i" self#line
|
||||
(if offsets then self#offset mode else self#column mode)
|
||||
end
|
||||
|
||||
let from_byte byte =
|
||||
let point_num = byte.Lexing.pos_cnum
|
||||
and point_bol = byte.Lexing.pos_bol
|
||||
in make ~byte ~point_num ~point_bol
|
||||
|
||||
let ghost = make ~byte:Lexing.dummy_pos ~point_num:(-1) ~point_bol:(-1)
|
||||
|
||||
let min =
|
||||
let byte = Lexing.{
|
||||
pos_fname = "";
|
||||
pos_lnum = 1;
|
||||
pos_bol = 0;
|
||||
pos_cnum = 0}
|
||||
in make ~byte ~point_num:0 ~point_bol:0
|
||||
|
||||
(* Comparisons *)
|
||||
|
||||
let equal pos1 pos2 =
|
||||
pos1#file = pos2#file && pos1#byte_offset = pos2#byte_offset
|
||||
|
||||
let lt pos1 pos2 =
|
||||
pos1#file = pos2#file && pos1#byte_offset < pos2#byte_offset
|
108
vendors/ligo-utils/simple-utils/pos.mli
vendored
Normal file
108
vendors/ligo-utils/simple-utils/pos.mli
vendored
Normal file
@ -0,0 +1,108 @@
|
||||
(* Positions in a file
|
||||
|
||||
A position in a file denotes a single unit belonging to it, for
|
||||
example, in an ASCII text file, it is a particular character within
|
||||
that file (the unit is the byte in this instance, since in ASCII
|
||||
one character is encoded with one byte).
|
||||
|
||||
Units can be either bytes (as ASCII characters) or, more
|
||||
generally, unicode points.
|
||||
|
||||
The type for positions is the object type [t].
|
||||
|
||||
We use here lexing positions to denote byte-oriented positions
|
||||
(field [byte]), and we manage code points by means of the fields
|
||||
[point_num] and [point_bol]. These two fields have a meaning
|
||||
similar to the fields [pos_cnum] and [pos_bol], respectively, from
|
||||
the standard module [Lexing]. That is to say, [point_num] holds the
|
||||
number of code points since the beginning of the file, and
|
||||
[point_bol] the number of code points since the beginning of the
|
||||
current line.
|
||||
|
||||
The name of the file is given by the field [file], and the line
|
||||
number by the field [line].
|
||||
*)
|
||||
|
||||
type t = <
|
||||
(* Payload *)
|
||||
|
||||
byte : Lexing.position;
|
||||
point_num : int;
|
||||
point_bol : int;
|
||||
file : string;
|
||||
line : int;
|
||||
|
||||
(* Setters *)
|
||||
|
||||
set_file : string -> t;
|
||||
set_line : int -> t;
|
||||
set_offset : int -> t;
|
||||
set : file:string -> line:int -> offset:int -> t;
|
||||
|
||||
(* The call [pos#new_line s], where the string [s] is either "\n" or
|
||||
"\c\r", updates the position [pos] with a new line. *)
|
||||
|
||||
new_line : string -> t;
|
||||
add_nl : t;
|
||||
|
||||
(* The call [pos#shift_bytes n] evaluates in a position that is the
|
||||
translation of position [pos] of [n] bytes forward in the
|
||||
file. The call [pos#shift_one_uchar n] is similar, except that it
|
||||
assumes that [n] is the number of bytes making up one unicode
|
||||
point. *)
|
||||
|
||||
shift_bytes : int -> t;
|
||||
shift_one_uchar : int -> t;
|
||||
|
||||
(* Getters *)
|
||||
|
||||
(* The call [pos#offset `Byte] provides the horizontal offset of the
|
||||
position [pos] in bytes. (An offset is the number of units, like
|
||||
bytes, since the beginning of the current line.) The call
|
||||
[pos#offset `Point] is the offset counted in number of unicode
|
||||
points.
|
||||
|
||||
The calls to the method [column] are similar to those to
|
||||
[offset], except that they give the curren column number.
|
||||
|
||||
The call [pos#line_offset `Byte] is the offset of the line of
|
||||
position [pos], counted in bytes. Dually, [pos#line_offset
|
||||
`Point] counts the same offset in code points.
|
||||
|
||||
The call [pos#byte_offset] is the offset of the position [pos]
|
||||
since the begininng of the file, counted in bytes.
|
||||
*)
|
||||
|
||||
offset : [`Byte | `Point] -> int;
|
||||
column : [`Byte | `Point] -> int;
|
||||
|
||||
line_offset : [`Byte | `Point] -> int;
|
||||
byte_offset : int;
|
||||
|
||||
(* Predicates *)
|
||||
|
||||
is_ghost : bool;
|
||||
|
||||
(* Conversions to [string] *)
|
||||
|
||||
to_string : ?offsets:bool -> [`Byte | `Point] -> string;
|
||||
compact : ?offsets:bool -> [`Byte | `Point] -> string;
|
||||
anonymous : ?offsets:bool -> [`Byte | `Point] -> string
|
||||
>
|
||||
|
||||
type pos = t
|
||||
|
||||
(* Constructors *)
|
||||
|
||||
val make : byte:Lexing.position -> point_num:int -> point_bol:int -> t
|
||||
val from_byte : Lexing.position -> t
|
||||
|
||||
(* Special positions *)
|
||||
|
||||
val ghost : t (* Same as [Lexing.dummy_pos] *)
|
||||
val min : t (* Lexing convention: line 1, offsets to 0 and file to "". *)
|
||||
|
||||
(* Comparisons *)
|
||||
|
||||
val equal : t -> t -> bool
|
||||
val lt : t -> t -> bool
|
5
vendors/ligo-utils/simple-utils/ppx_let_generalized/.gitignore
vendored
Normal file
5
vendors/ligo-utils/simple-utils/ppx_let_generalized/.gitignore
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
_build
|
||||
*.install
|
||||
*.merlin
|
||||
_opam
|
||||
|
17
vendors/ligo-utils/simple-utils/ppx_let_generalized/CHANGES.md
vendored
Normal file
17
vendors/ligo-utils/simple-utils/ppx_let_generalized/CHANGES.md
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
## git version
|
||||
|
||||
- Support for `%map.A.B.C` syntax to use values from a specific module, rather
|
||||
than the one in scope.
|
||||
|
||||
## v0.11
|
||||
|
||||
- Depend on ppxlib instead of (now deprecated) ppx\_core and ppx\_driver.
|
||||
|
||||
## 113.43.00
|
||||
|
||||
- Dropped `Open_in_body` support from ppx\_let, since it was only ever used
|
||||
in confusing chains of `Let_syntax` modules that introduced other
|
||||
`Let_syntax` modules in the "body" (e.g. for defining Commands whose
|
||||
bodies use Async). In this case it was decided that the better
|
||||
practice is to be explicit with `open ___.Let_syntax` at the different
|
||||
transition points, even though this is more verbose.
|
67
vendors/ligo-utils/simple-utils/ppx_let_generalized/CONTRIBUTING.md
vendored
Normal file
67
vendors/ligo-utils/simple-utils/ppx_let_generalized/CONTRIBUTING.md
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
This repository contains open source software that is developed and
|
||||
maintained by [Jane Street][js].
|
||||
|
||||
Contributions to this project are welcome and should be submitted via
|
||||
GitHub pull requests.
|
||||
|
||||
Signing contributions
|
||||
---------------------
|
||||
|
||||
We require that you sign your contributions. Your signature certifies
|
||||
that you wrote the patch or otherwise have the right to pass it on as
|
||||
an open-source patch. The rules are pretty simple: if you can certify
|
||||
the below (from [developercertificate.org][dco]):
|
||||
|
||||
```
|
||||
Developer Certificate of Origin
|
||||
Version 1.1
|
||||
|
||||
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
|
||||
1 Letterman Drive
|
||||
Suite D4700
|
||||
San Francisco, CA, 94129
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this
|
||||
license document, but changing it is not allowed.
|
||||
|
||||
|
||||
Developer's Certificate of Origin 1.1
|
||||
|
||||
By making a contribution to this project, I certify that:
|
||||
|
||||
(a) The contribution was created in whole or in part by me and I
|
||||
have the right to submit it under the open source license
|
||||
indicated in the file; or
|
||||
|
||||
(b) The contribution is based upon previous work that, to the best
|
||||
of my knowledge, is covered under an appropriate open source
|
||||
license and I have the right under that license to submit that
|
||||
work with modifications, whether created in whole or in part
|
||||
by me, under the same open source license (unless I am
|
||||
permitted to submit under a different license), as indicated
|
||||
in the file; or
|
||||
|
||||
(c) The contribution was provided directly to me by some other
|
||||
person who certified (a), (b) or (c) and I have not modified
|
||||
it.
|
||||
|
||||
(d) I understand and agree that this project and the contribution
|
||||
are public and that a record of the contribution (including all
|
||||
personal information I submit with it, including my sign-off) is
|
||||
maintained indefinitely and may be redistributed consistent with
|
||||
this project or the open source license(s) involved.
|
||||
```
|
||||
|
||||
Then you just add a line to every git commit message:
|
||||
|
||||
```
|
||||
Signed-off-by: Joe Smith <joe.smith@email.com>
|
||||
```
|
||||
|
||||
Use your real name (sorry, no pseudonyms or anonymous contributions.)
|
||||
|
||||
If you set your `user.name` and `user.email` git configs, you can sign
|
||||
your commit automatically with git commit -s.
|
||||
|
||||
[dco]: http://developercertificate.org/
|
||||
[js]: https://opensource.janestreet.com/
|
4
vendors/ligo-utils/simple-utils/ppx_let_generalized/CREDITS
vendored
Normal file
4
vendors/ligo-utils/simple-utils/ppx_let_generalized/CREDITS
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
This folder contains a generalization of ppx_let from Jane Street.
|
||||
See git log this_folder for the development history.
|
||||
|
||||
https://github.com/janestreet/ppx_let.git
|
21
vendors/ligo-utils/simple-utils/ppx_let_generalized/LICENSE.md
vendored
Normal file
21
vendors/ligo-utils/simple-utils/ppx_let_generalized/LICENSE.md
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2015--2019 Jane Street Group, LLC <opensource@janestreet.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
17
vendors/ligo-utils/simple-utils/ppx_let_generalized/Makefile
vendored
Normal file
17
vendors/ligo-utils/simple-utils/ppx_let_generalized/Makefile
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
INSTALL_ARGS := $(if $(PREFIX),--prefix $(PREFIX),)
|
||||
|
||||
default:
|
||||
dune build
|
||||
|
||||
install:
|
||||
dune install $(INSTALL_ARGS)
|
||||
|
||||
uninstall:
|
||||
dune uninstall $(INSTALL_ARGS)
|
||||
|
||||
reinstall: uninstall install
|
||||
|
||||
clean:
|
||||
dune clean
|
||||
|
||||
.PHONY: default install uninstall reinstall clean
|
169
vendors/ligo-utils/simple-utils/ppx_let_generalized/README.md
vendored
Normal file
169
vendors/ligo-utils/simple-utils/ppx_let_generalized/README.md
vendored
Normal file
@ -0,0 +1,169 @@
|
||||
ppx_let
|
||||
=======
|
||||
|
||||
A ppx rewriter for monadic and applicative let bindings, match expressions, and
|
||||
if expressions.
|
||||
|
||||
Overview
|
||||
--------
|
||||
|
||||
The aim of this rewriter is to make monadic and applicative code look nicer by
|
||||
writing custom binders the same way that we normally bind variables. In OCaml,
|
||||
the common way to bind the result of a computation to a variable is:
|
||||
|
||||
```ocaml
|
||||
let VAR = EXPR in BODY
|
||||
```
|
||||
|
||||
ppx\_let simply adds two new binders: `let%bind` and `let%map`. These are
|
||||
rewritten into calls to the `bind` and `map` functions respectively. These
|
||||
functions are expected to have
|
||||
|
||||
```ocaml
|
||||
val map : 'a t -> f:('a -> 'b) -> 'b t
|
||||
val bind : 'a t -> f:('a -> 'b t) -> 'b t
|
||||
```
|
||||
|
||||
for some type `t`, as one might expect.
|
||||
|
||||
These functions are to be provided by the user, and are generally expected to be
|
||||
part of the signatures of monads and applicatives modules. This is the case for
|
||||
all monads and applicatives defined by the Jane Street's Core suite of
|
||||
libraries. (see the section below on getting the right names into scope).
|
||||
|
||||
### Parallel bindings
|
||||
|
||||
ppx\_let understands parallel bindings as well. i.e.:
|
||||
|
||||
```ocaml
|
||||
let%bind VAR1 = EXPR1 and VAR2 = EXPR2 and VAR3 = EXPR3 in BODY
|
||||
```
|
||||
|
||||
The `and` keyword is seen as a binding combination operator. To do so it expects
|
||||
the presence of a `both` function, that lifts the OCaml pair operation to the
|
||||
type `t` in question:
|
||||
|
||||
```ocaml
|
||||
val both : 'a t -> 'b t -> ('a * 'b) t
|
||||
```
|
||||
|
||||
### Match statements
|
||||
|
||||
We found that this form was quite useful for match statements as well. So for
|
||||
convenience ppx\_let also accepts `%bind` and `%map` on the `match` keyword.
|
||||
Morally `match%bind expr with cases` is seen as `let%bind x = expr in match x
|
||||
with cases`.
|
||||
|
||||
### If statements
|
||||
|
||||
As a further convenience, ppx\_let accepts `%bind` and `%map` on the `if`
|
||||
keyword. The expression `if%bind expr1 then expr2 else expr3` is morally
|
||||
equivalent to `let%bind p = expr1 in if p then expr2 else expr3`.
|
||||
|
||||
Syntactic forms and actual rewriting
|
||||
------------------------------------
|
||||
|
||||
`ppx_let` adds six syntactic forms
|
||||
|
||||
```ocaml
|
||||
let%bind P = M in E
|
||||
|
||||
let%map P = M in E
|
||||
|
||||
match%bind M with P1 -> E1 | P2 -> E2 | ...
|
||||
|
||||
match%map M with P1 -> E1 | P2 -> E2 | ...
|
||||
|
||||
if%bind M then E1 else E2
|
||||
|
||||
if%map M then E1 else E2
|
||||
```
|
||||
|
||||
that expand into
|
||||
|
||||
```ocaml
|
||||
bind M ~f:(fun P -> E)
|
||||
|
||||
map M ~f:(fun P -> E)
|
||||
|
||||
bind M ~f:(function P1 -> E1 | P2 -> E2 | ...)
|
||||
|
||||
map M ~f:(function P1 -> E1 | P2 -> E2 | ...)
|
||||
|
||||
bind M ~f:(function true -> E1 | false -> E2)
|
||||
|
||||
map M ~f:(function true -> E1 | false -> E2)
|
||||
```
|
||||
|
||||
respectively.
|
||||
|
||||
As with `let`, `let%bind` and `let%map` also support multiple *parallel*
|
||||
bindings via the `and` keyword:
|
||||
|
||||
```ocaml
|
||||
let%bind P1 = M1 and P2 = M2 and P3 = M3 and P4 = M4 in E
|
||||
|
||||
let%map P1 = M1 and P2 = M2 and P3 = M3 and P4 = M4 in E
|
||||
```
|
||||
|
||||
that expand into
|
||||
|
||||
```ocaml
|
||||
let x1 = M1 and x2 = M2 and x3 = M3 and x4 = M4 in
|
||||
bind
|
||||
(both x1 (both x2 (both x3 x4)))
|
||||
~f:(fun (P1, (P2, (P3, P4))) -> E)
|
||||
|
||||
let x1 = M1 and x2 = M2 and x3 = M3 and x4 = M4 in
|
||||
map
|
||||
(both x1 (both x2 (both x3 x4)))
|
||||
~f:(fun (P1, (P2, (P3, P4))) -> E)
|
||||
```
|
||||
|
||||
respectively. (Instead of `x1`, `x2`, ... ppx\_let uses variable names that are
|
||||
unlikely to clash with other names)
|
||||
|
||||
As with `let`, names introduced by left-hand sides of the let bindings are not
|
||||
available in subsequent right-hand sides of the same sequence.
|
||||
|
||||
Getting the right names in scope
|
||||
--------------------------------
|
||||
|
||||
The description of how the `%bind` and `%map` syntax extensions expand left out
|
||||
the fact that the names `bind`, `map`, `both`, and `return` are not used
|
||||
directly., but rather qualified by `Let_syntax`. For example, we use
|
||||
`Let_syntax.bind` rather than merely `bind`.
|
||||
|
||||
This means one just needs to get a properly loaded `Let_syntax` module
|
||||
in scope to use `%bind` and `%map`.
|
||||
|
||||
Alternatively, the extension can use values from a `Let_syntax` module
|
||||
other than the one in scope. If you write `%map.A.B.C` instead of
|
||||
`%map`, the expansion will use `A.B.C.Let_syntax.map` instead of
|
||||
`Let_syntax.map` (and similarly for all extension points).
|
||||
|
||||
For monads, `Core.Monad.Make` produces a submodule `Let_syntax` of the
|
||||
appropriate form.
|
||||
|
||||
For applicatives, the convention for these modules is to have a submodule
|
||||
`Let_syntax` of the form:
|
||||
|
||||
```ocaml
|
||||
module Let_syntax : sig
|
||||
val return : 'a -> 'a t
|
||||
val map : 'a t -> f:('a -> 'b) -> 'b t
|
||||
val both : 'a t -> 'b t -> ('a * 'b) t
|
||||
module Open_on_rhs : << some signature >>
|
||||
end
|
||||
```
|
||||
|
||||
The `Open_on_rhs` submodule is used by variants of `%map` and `%bind` called
|
||||
`%map_open` and `%bind_open`. It is locally opened on the right hand sides of
|
||||
the rewritten let bindings in `%map_open` and `%bind_open` expressions. For
|
||||
`match%map_open` and `match%bind_open` expressions, `Open_on_rhs` is opened for
|
||||
the expression being matched on.
|
||||
|
||||
`Open_on_rhs` is useful when programming with applicatives, which operate in a
|
||||
staged manner where the operators used to construct the applicatives are
|
||||
distinct from the operators used to manipulate the values those applicatives
|
||||
produce. For monads, `Open_on_rhs` contains `return`.
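
As a hedged illustration of that staging, here is a degenerate "identity" applicative whose `Open_on_rhs` provides two constructors, `flag` and `anon` (both names are made up for the example), together with the hand-expanded form of a `%map_open` use.

```ocaml
module Cmd = struct
  module Let_syntax = struct
    let map m ~f = f m
    let both a b = (a, b)
    module Open_on_rhs = struct
      let flag = 66
      let anon = 77
    end
  end
end

(* [let%map_open.Cmd x = flag and y = anon in x + y] expands, roughly, to: *)
let total =
  let x1 = (let open Cmd.Let_syntax.Open_on_rhs in flag)
  and x2 = (let open Cmd.Let_syntax.Open_on_rhs in anon) in
  Cmd.Let_syntax.map (Cmd.Let_syntax.both x1 x2) ~f:(fun (x, y) -> x + y)
```

Note that the generalized expander vendored in this tree departs slightly from this convention: it registers its own extension names and always opens a per-extension `Open_on_rhs_<name>` submodule (see `expander/ppx_let_expander.ml` below).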
|
0
vendors/ligo-utils/simple-utils/ppx_let_generalized/dune
vendored
Normal file
0
vendors/ligo-utils/simple-utils/ppx_let_generalized/dune
vendored
Normal file
6
vendors/ligo-utils/simple-utils/ppx_let_generalized/expander/dune
vendored
Normal file
6
vendors/ligo-utils/simple-utils/ppx_let_generalized/expander/dune
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
(library
|
||||
(name ppx_let_expander)
|
||||
(public_name simple-utils.ppx_let_generalized.expander)
|
||||
(libraries base ppxlib)
|
||||
(preprocess no_preprocessing)
|
||||
)
|
155
vendors/ligo-utils/simple-utils/ppx_let_generalized/expander/ppx_let_expander.ml
vendored
Normal file
155
vendors/ligo-utils/simple-utils/ppx_let_generalized/expander/ppx_let_expander.ml
vendored
Normal file
@ -0,0 +1,155 @@
|
||||
open Base
|
||||
open Ppxlib
|
||||
open Ast_builder.Default
|
||||
|
||||
module List = struct
|
||||
include List
|
||||
|
||||
let reduce_exn l ~f =
|
||||
match l with
|
||||
| [] -> invalid_arg "List.reduce_exn"
|
||||
| hd :: tl -> fold_left tl ~init:hd ~f
|
||||
;;
|
||||
end
|
||||
|
||||
let let_syntax ~modul : Longident.t =
|
||||
match modul with
|
||||
| None -> Lident "Let_syntax"
|
||||
| Some id -> Ldot (id.txt, "Let_syntax")
|
||||
;;
|
||||
|
||||
let open_on_rhs ~loc ~modul ~extension_name_s =
|
||||
Located.mk ~loc (Longident.Ldot (let_syntax ~modul, "Open_on_rhs_" ^ extension_name_s))
|
||||
;;
|
||||
|
||||
let eoperator ~loc ~modul func =
|
||||
let lid : Longident.t = Ldot (let_syntax ~modul, func) in
|
||||
pexp_ident ~loc (Located.mk ~loc lid)
|
||||
;;
|
||||
|
||||
let expand_with_tmp_vars ~loc bindings expr ~f =
|
||||
match bindings with
|
||||
| [ _ ] -> f ~loc bindings expr
|
||||
| _ ->
|
||||
let tmp_vars =
|
||||
List.map bindings ~f:(fun _ -> gen_symbol ~prefix:"__let_syntax" ())
|
||||
in
|
||||
let s_rhs_tmp_var (* s/rhs/tmp_var *) =
|
||||
List.map2_exn bindings tmp_vars ~f:(fun vb var ->
|
||||
{ vb with pvb_expr = evar ~loc:vb.pvb_expr.pexp_loc var })
|
||||
in
|
||||
let s_lhs_tmp_var (* s/lhs/tmp_var *) =
|
||||
List.map2_exn bindings tmp_vars ~f:(fun vb var ->
|
||||
{ vb with pvb_pat = pvar ~loc:vb.pvb_pat.ppat_loc var })
|
||||
in
|
||||
pexp_let ~loc Nonrecursive s_lhs_tmp_var (f ~loc s_rhs_tmp_var expr)
|
||||
;;
|
||||
|
||||
let bind_apply ~loc ~modul extension_name_s ~arg ~fn =
|
||||
pexp_apply
|
||||
~loc
|
||||
(eoperator ~loc ~modul extension_name_s)
|
||||
[ Nolabel, arg; Labelled "f", fn ]
|
||||
;;
|
||||
|
||||
(* Change by Georges: Always open for all extension names. *)
|
||||
let maybe_open ~to_open:module_to_open expr =
|
||||
let loc = expr.pexp_loc in
|
||||
pexp_open ~loc Override (module_to_open ~loc) expr
|
||||
;;
|
||||
|
||||
let expand_let extension_name_s ~loc ~modul bindings body =
|
||||
if List.is_empty bindings
|
||||
then invalid_arg "expand_let: list of bindings must be non-empty";
|
||||
(* Build expression [both E1 (both E2 (both ...))] *)
|
||||
let nested_boths =
|
||||
let rev_boths = List.rev_map bindings ~f:(fun vb -> vb.pvb_expr) in
|
||||
List.reduce_exn rev_boths ~f:(fun acc e ->
|
||||
let loc = e.pexp_loc in
|
||||
eapply ~loc (eoperator ~loc ~modul "both") [ e; acc ])
|
||||
in
|
||||
(* Build pattern [(P1, (P2, ...))] *)
|
||||
let nested_patterns =
|
||||
let rev_patts = List.rev_map bindings ~f:(fun vb -> vb.pvb_pat) in
|
||||
List.reduce_exn rev_patts ~f:(fun acc p ->
|
||||
let loc = p.ppat_loc in
|
||||
ppat_tuple ~loc [ p; acc ])
|
||||
in
|
||||
bind_apply
|
||||
~loc
|
||||
~modul
|
||||
extension_name_s
|
||||
~arg:nested_boths
|
||||
~fn:(pexp_fun ~loc Nolabel None nested_patterns body)
|
||||
;;
|
||||
|
||||
let expand_match extension_name_s ~loc ~modul expr cases =
|
||||
bind_apply
|
||||
~loc
|
||||
~modul
|
||||
extension_name_s
|
||||
~arg:(maybe_open ~to_open:(open_on_rhs ~modul ~extension_name_s) expr)
|
||||
~fn:(pexp_function ~loc cases)
|
||||
;;
|
||||
|
||||
let expand_if extension_name ~loc expr then_ else_ =
|
||||
expand_match
|
||||
extension_name
|
||||
~loc
|
||||
expr
|
||||
[ case ~lhs:(pbool ~loc true) ~guard:None ~rhs:then_
|
||||
; case ~lhs:(pbool ~loc false) ~guard:None ~rhs:else_
|
||||
]
|
||||
;;
|
||||
|
||||
let expand ~modul extension_name_s expr =
|
||||
let loc = expr.pexp_loc in
|
||||
let expansion =
|
||||
match expr.pexp_desc with
|
||||
| Pexp_let (Nonrecursive, bindings, expr) ->
|
||||
let bindings =
|
||||
List.map bindings ~f:(fun vb ->
|
||||
let pvb_pat =
|
||||
(* Temporary hack tentatively detecting that the parser
|
||||
has expanded `let x : t = e` into `let x : t = (e : t)`.
|
||||
|
||||
For reference, here is the relevant part of the parser:
|
||||
https://github.com/ocaml/ocaml/blob/4.07/parsing/parser.mly#L1628 *)
|
||||
match vb.pvb_pat.ppat_desc, vb.pvb_expr.pexp_desc with
|
||||
| ( Ppat_constraint (p, { ptyp_desc = Ptyp_poly ([], t1); _ })
|
||||
, Pexp_constraint (_, t2) )
|
||||
when phys_equal t1 t2 -> p
|
||||
| _ -> vb.pvb_pat
|
||||
in
|
||||
{ vb with
|
||||
pvb_pat
|
||||
; pvb_expr =
|
||||
maybe_open ~to_open:(open_on_rhs ~modul ~extension_name_s) vb.pvb_expr
|
||||
})
|
||||
in
|
||||
expand_with_tmp_vars ~loc bindings expr ~f:(expand_let extension_name_s ~modul)
|
||||
| Pexp_let (Recursive, _, _) ->
|
||||
Location.raise_errorf
|
||||
~loc
|
||||
"'let%%%s' may not be recursive"
|
||||
extension_name_s
|
||||
| Pexp_match (expr, cases) -> expand_match extension_name_s ~loc ~modul expr cases
|
||||
| Pexp_ifthenelse (expr, then_, else_) ->
|
||||
let else_ =
|
||||
match else_ with
|
||||
| Some else_ -> else_
|
||||
| None ->
|
||||
Location.raise_errorf
|
||||
~loc
|
||||
"'if%%%s' must include an else branch"
|
||||
extension_name_s
|
||||
in
|
||||
expand_if extension_name_s ~loc ~modul expr then_ else_
|
||||
| _ ->
|
||||
Location.raise_errorf
|
||||
~loc
|
||||
"'%%%s' can only be used with 'let', 'match', and 'if'"
|
||||
extension_name_s
|
||||
in
|
||||
{ expansion with pexp_attributes = expr.pexp_attributes @ expansion.pexp_attributes }
|
||||
;;
|
3
vendors/ligo-utils/simple-utils/ppx_let_generalized/expander/ppx_let_expander.mli
vendored
Normal file
3
vendors/ligo-utils/simple-utils/ppx_let_generalized/expander/ppx_let_expander.mli
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
open Ppxlib
|
||||
|
||||
val expand : modul:longident loc option -> string -> expression -> expression
|
7
vendors/ligo-utils/simple-utils/ppx_let_generalized/src/dune
vendored
Normal file
7
vendors/ligo-utils/simple-utils/ppx_let_generalized/src/dune
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
(library
|
||||
(name ppx_let)
|
||||
(public_name simple-utils.ppx_let_generalized)
|
||||
(kind ppx_rewriter)
|
||||
(libraries ppxlib ppx_let_expander)
|
||||
(preprocess no_preprocessing)
|
||||
)
|
19
vendors/ligo-utils/simple-utils/ppx_let_generalized/src/ppx_let.ml
vendored
Normal file
19
vendors/ligo-utils/simple-utils/ppx_let_generalized/src/ppx_let.ml
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
open Ppxlib
|
||||
|
||||
let ext extension_name_s =
|
||||
Extension.declare_with_path_arg
|
||||
extension_name_s
|
||||
Extension.Context.expression
|
||||
Ast_pattern.(single_expr_payload __)
|
||||
(fun ~loc:_ ~path:_ ~arg expr ->
|
||||
Ppx_let_expander.expand extension_name_s ~modul:arg expr)
|
||||
;;
|
||||
|
||||
let () =
|
||||
Driver.register_transformation
|
||||
"let"
|
||||
~extensions:(List.map ext [
|
||||
"bind";
|
||||
"xxx";
|
||||
])
|
||||
;;
|
1
vendors/ligo-utils/simple-utils/ppx_let_generalized/src/ppx_let.mli
vendored
Normal file
1
vendors/ligo-utils/simple-utils/ppx_let_generalized/src/ppx_let.mli
vendored
Normal file
@ -0,0 +1 @@
|
||||
|
1
vendors/ligo-utils/simple-utils/ppx_let_generalized/test/dune
vendored
Normal file
1
vendors/ligo-utils/simple-utils/ppx_let_generalized/test/dune
vendored
Normal file
@ -0,0 +1 @@
|
||||
(executables (names test) (preprocess (pps ppx_let_generalized)))
|
27
vendors/ligo-utils/simple-utils/ppx_let_generalized/test/test-locations.mlt
vendored
Normal file
27
vendors/ligo-utils/simple-utils/ppx_let_generalized/test/test-locations.mlt
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
(* -*- tuareg -*- *)
|
||||
|
||||
module Let_syntax = struct
|
||||
type 'a t = T of 'a
|
||||
|
||||
let map (T x) ~f = T (f x)
|
||||
let both (T x) (T y) = T (x, y)
|
||||
|
||||
module Open_on_rhs = struct
|
||||
let return x = T x
|
||||
let f x ~(doc : string) = T (x, doc)
|
||||
end
|
||||
end
|
||||
|
||||
let _ =
|
||||
[%map_open
|
||||
let x = return 42
|
||||
and y = f 42 in
|
||||
()]
|
||||
;;
|
||||
|
||||
[%%expect
|
||||
{|
|
||||
Line _, characters 12-16:
|
||||
Error: This expression has type doc:string -> (int * string) Let_syntax.t
|
||||
but an expression was expected of type 'a Let_syntax.t
|
||||
|}]
|
189
vendors/ligo-utils/simple-utils/ppx_let_generalized/test/test.ml
vendored
Normal file
189
vendors/ligo-utils/simple-utils/ppx_let_generalized/test/test.ml
vendored
Normal file
@ -0,0 +1,189 @@
|
||||
module Monad_example = struct
|
||||
module Let_syntax = struct
|
||||
let bind x ~f = f x
|
||||
module Open_on_rhs_bind = struct
|
||||
let return _ = "foo"
|
||||
end
|
||||
end
|
||||
|
||||
let _mf a =
|
||||
let%bind xyz = return a in
|
||||
(int_of_string xyz + 1)
|
||||
;;
|
||||
end
|
||||
|
||||
(* TODO: re-enable some tests *)
|
||||
|
||||
(*
|
||||
module Monad_example = struct
|
||||
module X : sig
|
||||
type 'a t
|
||||
|
||||
module Let_syntax : sig
|
||||
val return : 'a -> 'a t
|
||||
|
||||
module Let_syntax : sig
|
||||
val return : 'a -> 'a t
|
||||
val bind : 'a t -> f:('a -> 'b t) -> 'b t
|
||||
val map : 'a t -> f:('a -> 'b) -> 'b t
|
||||
val both : 'a t -> 'b t -> ('a * 'b) t
|
||||
|
||||
module Open_on_rhs : sig
|
||||
val return : 'a -> 'a t
|
||||
end
|
||||
end
|
||||
end
|
||||
end = struct
|
||||
type 'a t = 'a
|
||||
|
||||
let return x = x
|
||||
let bind x ~f = f x
|
||||
let map x ~f = f x
|
||||
let both x y = x, y
|
||||
|
||||
module Let_syntax = struct
|
||||
let return = return
|
||||
|
||||
module Let_syntax = struct
|
||||
let return = return
|
||||
let bind = bind
|
||||
let map = map
|
||||
let both = both
|
||||
|
||||
module Open_on_rhs = struct
|
||||
let return = return
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
open X.Let_syntax
|
||||
|
||||
let _mf a : _ X.t =
|
||||
let%bind_open x = a in
|
||||
return (x + 1)
|
||||
;;
|
||||
|
||||
let _mf' a b c : _ X.t =
|
||||
let%bind_open x = a
|
||||
and y = b
|
||||
and u, v = c in
|
||||
return (x + y + (u * v))
|
||||
;;
|
||||
|
||||
let _mg a : _ X.t =
|
||||
let%map x : int X.t = a in
|
||||
x + 1
|
||||
;;
|
||||
|
||||
let _mg' a b c : _ X.t =
|
||||
let%map x = a
|
||||
and y = b
|
||||
and u, v = c in
|
||||
x + y + (u * v)
|
||||
;;
|
||||
|
||||
let _mh a : _ X.t =
|
||||
match%bind_open a with
|
||||
| 0 -> return true
|
||||
| _ -> return false
|
||||
;;
|
||||
|
||||
let _mi a : _ X.t =
|
||||
match%map a with
|
||||
| 0 -> true
|
||||
| _ -> false
|
||||
;;
|
||||
|
||||
let _mif a : _ X.t = if%bind_open a then return true else return false
|
||||
let _mif' a : _ X.t = if%map a then true else false
|
||||
end
|
||||
|
||||
module Applicative_example = struct
|
||||
module X : sig
|
||||
type 'a t
|
||||
|
||||
module Let_syntax : sig
|
||||
val return : 'a -> 'a t
|
||||
|
||||
module Let_syntax : sig
|
||||
val return : 'a -> 'a t
|
||||
val map : 'a t -> f:('a -> 'b) -> 'b t
|
||||
val both : 'a t -> 'b t -> ('a * 'b) t
|
||||
|
||||
module Open_on_rhs : sig
|
||||
val flag : int t
|
||||
val anon : int t
|
||||
end
|
||||
end
|
||||
end
|
||||
end = struct
|
||||
type 'a t = 'a
|
||||
|
||||
let return x = x
|
||||
let map x ~f = f x
|
||||
let both x y = x, y
|
||||
|
||||
module Let_syntax = struct
|
||||
let return = return
|
||||
|
||||
module Let_syntax = struct
|
||||
let return = return
|
||||
let map = map
|
||||
let both = both
|
||||
|
||||
module Open_on_rhs = struct
|
||||
let flag = 66
|
||||
let anon = 77
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
open X.Let_syntax
|
||||
|
||||
(* {[
|
||||
let _af a : _ X.t =
|
||||
let%bind x = a in (* "Error: Unbound value Let_syntax.bind" *)
|
||||
return (x + 1)
|
||||
]} *)
|
||||
|
||||
(* {[
|
||||
let _af' a b c : _ X.t =
|
||||
let%bind x = a and y = b and (u, v) = c in (* "Error: Unbound value Let_syntax.bind" *)
|
||||
return (x + y + (u * v))
|
||||
]} *)
|
||||
|
||||
let _ag a : _ X.t =
|
||||
let%map x = a in
|
||||
x + 1
|
||||
;;
|
||||
|
||||
let _ag' a b c : _ X.t =
|
||||
let%map x = a
|
||||
and y = b
|
||||
and u, v = c in
|
||||
x + y + (u * v)
|
||||
;;
|
||||
|
||||
(* {[
|
||||
let _ah a : _ X.t =
|
||||
match%bind a with (* "Error: Unbound value Let_syntax.bind" *)
|
||||
| 0 -> return true
|
||||
| _ -> return false
|
||||
]} *)
|
||||
|
||||
let _ai a : _ X.t =
|
||||
match%map a with
|
||||
| 0 -> true
|
||||
| _ -> false
|
||||
;;
|
||||
end
|
||||
|
||||
module Example_without_open = struct
|
||||
let _ag a : _ Applicative_example.X.t =
|
||||
let%map.Applicative_example.X.Let_syntax x = a in
|
||||
x + 1
|
||||
;;
|
||||
end
|
||||
*)
|
128
vendors/ligo-utils/simple-utils/region.ml
vendored
Normal file
128
vendors/ligo-utils/simple-utils/region.ml
vendored
Normal file
@ -0,0 +1,128 @@
|
||||
(* Regions of a file *)
|
||||
|
||||
let sprintf = Printf.sprintf
|
||||
|
||||
type t = <
|
||||
start : Pos.t;
|
||||
stop : Pos.t;
|
||||
|
||||
(* Setters *)
|
||||
|
||||
shift_bytes : int -> t;
|
||||
shift_one_uchar : int -> t;
|
||||
set_file : string -> t;
|
||||
|
||||
(* Getters *)
|
||||
|
||||
file : string;
|
||||
pos : Pos.t * Pos.t;
|
||||
byte_pos : Lexing.position * Lexing.position;
|
||||
|
||||
(* Predicates *)
|
||||
|
||||
is_ghost : bool;
|
||||
|
||||
(* Conversions to [string] *)
|
||||
|
||||
to_string : ?file:bool -> ?offsets:bool -> [`Byte | `Point] -> string;
|
||||
compact : ?file:bool -> ?offsets:bool -> [`Byte | `Point] -> string
|
||||
>
|
||||
|
||||
type region = t
|
||||
|
||||
type 'a reg = {region: t; value: 'a}
|
||||
|
||||
(* Injections *)
|
||||
|
||||
exception Invalid
|
||||
|
||||
let make ~(start: Pos.t) ~(stop: Pos.t) =
|
||||
if start#file <> stop#file || start#byte_offset > stop#byte_offset
|
||||
then raise Invalid
|
||||
else
|
||||
object
|
||||
val start = start
|
||||
method start = start
|
||||
val stop = stop
|
||||
method stop = stop
|
||||
|
||||
method shift_bytes len =
|
||||
let start = start#shift_bytes len
|
||||
and stop = stop#shift_bytes len
|
||||
in {< start = start; stop = stop >}
|
||||
|
||||
method shift_one_uchar len =
|
||||
let start = start#shift_one_uchar len
|
||||
and stop = stop#shift_one_uchar len
|
||||
in {< start = start; stop = stop >}
|
||||
|
||||
method set_file name =
|
||||
let start = start#set_file name
|
||||
and stop = stop#set_file name
|
||||
in {< start = start; stop = stop >}
|
||||
|
||||
(* Getters *)
|
||||
|
||||
method file = start#file
|
||||
method pos = start, stop
|
||||
method byte_pos = start#byte, stop#byte
|
||||
|
||||
(* Predicates *)
|
||||
|
||||
method is_ghost = start#is_ghost && stop#is_ghost
|
||||
|
||||
(* Conversions to strings *)
|
||||
|
||||
method to_string ?(file=true) ?(offsets=true) mode =
|
||||
let horizontal = if offsets then "character" else "column"
|
||||
and start_offset =
|
||||
if offsets then start#offset mode else start#column mode
|
||||
and stop_offset =
|
||||
if offsets then stop#offset mode else stop#column mode in
|
||||
let info =
|
||||
if file
|
||||
then sprintf "in file \"%s\", line %i, %s"
|
||||
(String.escaped start#file) start#line horizontal
|
||||
else sprintf "at line %i, %s" start#line horizontal
|
||||
in if stop#line = start#line
|
||||
then sprintf "%ss %i-%i" info start_offset stop_offset
|
||||
else sprintf "%s %i to line %i, %s %i"
|
||||
info start_offset stop#line horizontal stop_offset
|
||||
|
||||
method compact ?(file=true) ?(offsets=true) mode =
|
||||
let start_str = start#anonymous ~offsets mode
|
||||
and stop_str = stop#anonymous ~offsets mode in
|
||||
if start#file = stop#file then
|
||||
if file then sprintf "%s:%s-%s" start#file start_str stop_str
|
||||
else sprintf "%s-%s" start_str stop_str
|
||||
else sprintf "%s:%s-%s:%s" start#file start_str stop#file stop_str
|
||||
end
|
||||
|
||||
(* Special regions *)
|
||||
|
||||
let ghost = make ~start:Pos.ghost ~stop:Pos.ghost
|
||||
|
||||
let min = make ~start:Pos.min ~stop:Pos.min
|
||||
|
||||
(* Comparisons *)
|
||||
|
||||
let equal r1 r2 =
|
||||
r1#file = r2#file
|
||||
&& Pos.equal r1#start r2#start
|
||||
&& Pos.equal r1#stop r2#stop
|
||||
|
||||
let lt r1 r2 =
|
||||
r1#file = r2#file
|
||||
&& not r1#is_ghost
|
||||
&& not r2#is_ghost
|
||||
&& Pos.lt r1#start r2#start
|
||||
&& Pos.lt r1#stop r2#stop
|
||||
|
||||
let cover r1 r2 =
|
||||
if r1#is_ghost
|
||||
then r2
|
||||
else if r2#is_ghost
|
||||
then r1
|
||||
else if lt r1 r2
|
||||
then make ~start:r1#start ~stop:r2#stop
|
||||
else make ~start:r2#start ~stop:r1#stop
|
125
vendors/ligo-utils/simple-utils/region.mli
vendored
Normal file
125
vendors/ligo-utils/simple-utils/region.mli
vendored
Normal file
@ -0,0 +1,125 @@
|
||||
(* Regions of a file
|
||||
|
||||
A _region_ is a contiguous series of bytes, for example, in a text
|
||||
file. It is here denoted by the object type [t].
|
||||
|
||||
The start (included) of the region is given by the field [start],
|
||||
which is a _position_, and the end (excluded) is the position given
|
||||
by the field [stop]. The convention of including the start and
|
||||
excluding the end makes it possible to have empty regions if, and only if,
|
||||
[start = stop]. See module [Pos] for the definition of positions.
|
||||
|
||||
The first byte of a file starts at the offset zero (that is,
|
||||
column one), and [start] is always lower than or equal to [stop],
|
||||
and they must refer to the same file.
|
||||
*)
|
||||
|
||||
type t = <
|
||||
start : Pos.t;
|
||||
stop : Pos.t;
|
||||
|
||||
(* Setters *)
|
||||
|
||||
(* The call [region#shift_bytes n] evaluates to a region that is the
|
||||
translation of region [region] by [n] bytes forward in the
|
||||
file. The call [region#shift_one_uchar n] is similar, except that
|
||||
it assumes that [n] is the number of bytes making up one unicode
|
||||
point. The call [region#set_file f] sets the file name to be
|
||||
[f]. *)
|
||||
|
||||
shift_bytes : int -> t;
|
||||
shift_one_uchar : int -> t;
|
||||
set_file : string -> t;
|
||||
|
||||
(* Getters *)
|
||||
|
||||
(* The method [file] returns the file name.
|
||||
The method [pos] returns the values of the fields [start] and [stop].
|
||||
The method [byte_pos] returns the start and end positions of the
|
||||
region at hand _interpreting them as lexing positions_, that is,
|
||||
the unit is the byte. *)
|
||||
|
||||
file : string;
|
||||
pos : Pos.t * Pos.t;
|
||||
byte_pos : Lexing.position * Lexing.position;
|
||||
|
||||
(* Predicates *)
|
||||
|
||||
is_ghost : bool;
|
||||
|
||||
(* Conversions to [string] *)
|
||||
|
||||
(* The call [region#to_string ~file ~offsets mode] evaluates to a
|
||||
string denoting the region [region].
|
||||
|
||||
The name of the file is present if, and only if, [file = true] or
|
||||
[file] is missing.
|
||||
|
||||
The positions in the file are expressed as horizontal offsets if
|
||||
[offsets = true] or [offsets] is missing (the default), otherwise
|
||||
as columns.
|
||||
|
||||
If [mode = `Byte], those positions will be assumed to have bytes
|
||||
as their unit, otherwise, if [mode = `Point], they will be
|
||||
assumed to refer to code points.
|
||||
|
||||
The method [compact] has the same signature and calling
|
||||
convention as [to_string], except that the resulting string is
|
||||
more compact.
|
||||
*)
|
||||
|
||||
to_string : ?file:bool -> ?offsets:bool -> [`Byte | `Point] -> string;
|
||||
compact : ?file:bool -> ?offsets:bool -> [`Byte | `Point] -> string
|
||||
>
|
||||
|
||||
type region = t
|
||||
|
||||
type 'a reg = {region: t; value: 'a}
|
||||
|
||||
(* Constructors *)
|
||||
|
||||
(* The function [make] creates a region from two positions. If the
|
||||
positions are not properly ordered or refer to different files, the
|
||||
exception [Invalid] is raised. *)
|
||||
|
||||
exception Invalid
|
||||
|
||||
val make : start:Pos.t -> stop:Pos.t -> t
|
||||
|
||||
(* Special regions *)
|
||||
|
||||
(* To deal with ghost expressions, that is, pieces of abstract syntax
|
||||
that have not been built from excerpts of concrete syntax, we need
|
||||
_ghost regions_. The module [Pos] provides a [ghost] position, and
|
||||
we also provide a [ghost] region and, in type [t], the method
|
||||
[is_ghost] to check it. *)
|
||||
|
||||
val ghost : t (* Two [Pos.ghost] positions *)
|
||||
|
||||
(* Occasionally, we may need a minimum region. It is here made of two
|
||||
minimal positions. *)
|
||||
|
||||
val min : t (* Two [Pos.min] positions *)
|
||||
|
||||
(* Comparisons *)
|
||||
|
||||
(* Two regions are equal if, and only if, they refer to the same file
|
||||
and their start positions are equal and their stop positions are
|
||||
equal. See [Pos.equal]. Note that [r1] and [r2] can be ghosts. *)
|
||||
|
||||
val equal : t -> t -> bool
|
||||
|
||||
(* The call [lt r1 r2] ("lower than") has the value [true] if, and
|
||||
only if, regions [r1] and [r2] refer to the same file, none is a
|
||||
ghost and the start position of [r1] is lower than that of
|
||||
[r2]. (See [Pos.lt].) *)
|
||||
|
||||
val lt : t -> t -> bool
|
||||
|
||||
(* Given two regions [r1] and [r2], we may want the region [cover r1
|
||||
r2] that covers [r1] and [r2]. We have the property [equal (cover r1 r2)
|
||||
(cover r2 r1)]. (In a sense, it is the maximum region, but we avoid
|
||||
that name because of the [min] function above.) If [r1] is a ghost,
|
||||
the cover is [r2], and if [r2] is a ghost, the cover is [r1]. *)
|
||||
|
||||
val cover : t -> t -> t
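
(* Illustrative note, not part of the vendored interface: with the
   implementation in region.ml above, a region confined to line 1 of
   "foo.ml", whose start and stop sit at byte offsets 5 and 9, renders as

     region#to_string `Byte   ==>   in file "foo.ml", line 1, characters 5-9

   since both positions are on the same line and [~file] and [~offsets]
   default to [true]. *)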
|
50
vendors/ligo-utils/simple-utils/simple-utils.opam
vendored
Normal file
50
vendors/ligo-utils/simple-utils/simple-utils.opam
vendored
Normal file
@ -0,0 +1,50 @@
|
||||
opam-version: "2.0"
|
||||
name: "simple-utils"
|
||||
version: "dev"
|
||||
synopsis: "LIGO Utilities, to be used by other libraries"
|
||||
maintainer: "Galfour <ligolang@gmail.com>"
|
||||
authors: "Galfour <ligolang@gmail.com>"
|
||||
license: "MIT"
|
||||
homepage: "https://gitlab.com/ligolang/ligo-utils"
|
||||
bug-reports: "https://gitlab.com/ligolang/ligo-utils/issues"
|
||||
depends: [
|
||||
"dune"
|
||||
"base"
|
||||
"base"
|
||||
"bigstring"
|
||||
"calendar"
|
||||
"cohttp-lwt-unix"
|
||||
"cstruct"
|
||||
"ezjsonm"
|
||||
"hex"
|
||||
"hidapi"
|
||||
"ipaddr"
|
||||
"irmin"
|
||||
"js_of_ocaml"
|
||||
"lwt"
|
||||
"lwt_log"
|
||||
"mtime"
|
||||
"ocplib-endian"
|
||||
"ocp-ocamlres"
|
||||
"re"
|
||||
"rresult"
|
||||
"stdio"
|
||||
"uri"
|
||||
"uutf"
|
||||
"zarith"
|
||||
"ocplib-json-typed"
|
||||
"ocplib-json-typed-bson"
|
||||
"tezos-crypto"
|
||||
"tezos-stdlib-unix"
|
||||
"tezos-data-encoding"
|
||||
"tezos-protocol-environment"
|
||||
"tezos-protocol-alpha"
|
||||
"michelson-parser"
|
||||
# from ppx_let:
|
||||
"ocaml" {>= "4.04.2" & < "4.08.0"}
|
||||
"dune" {build & >= "1.5.1"}
|
||||
"ppxlib" {>= "0.5.0"}
|
||||
]
|
||||
build: [
|
||||
["dune" "build" "-p" name]
|
||||
]
|
15
vendors/ligo-utils/simple-utils/simple_utils.ml
vendored
Normal file
15
vendors/ligo-utils/simple-utils/simple_utils.ml
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
module Function = Function
|
||||
module Trace = Trace
|
||||
module Logger = Logger
|
||||
module PP_helpers = PP_helpers
|
||||
module Location = Location
|
||||
|
||||
module List = X_list
|
||||
module Option = X_option
|
||||
module Tuple = Tuple
|
||||
module Map = X_map
|
||||
module Dictionary = Dictionary
|
||||
module Tree = Tree
|
||||
module Region = Region
|
||||
module Pos = Pos
|
||||
|
370
vendors/ligo-utils/simple-utils/trace.ml
vendored
Normal file
370
vendors/ligo-utils/simple-utils/trace.ml
vendored
Normal file
@ -0,0 +1,370 @@
|
||||
module J = Yojson.Basic
|
||||
|
||||
type error = [`Assoc of (string * J.t) list]
|
||||
|
||||
module JSON_string_utils = struct
|
||||
let member = J.Util.member
|
||||
let string = J.Util.to_string_option
|
||||
let int = J.Util.to_int_option
|
||||
|
||||
let swap f l r = f r l
|
||||
|
||||
let unit x = Some x
|
||||
let bind f = function None -> None | Some x -> Some (f x)
|
||||
let bind2 f = fun l r -> match l, r with
|
||||
None, None -> None
|
||||
| None, Some _ -> None
|
||||
| Some _, None -> None
|
||||
| Some l, Some r -> Some (f l r)
|
||||
|
||||
let default d = function
|
||||
Some x -> x
|
||||
| None -> d
|
||||
|
||||
let string_of_int = bind string_of_int
|
||||
|
||||
let (||) l r = l |> default r
|
||||
let (|^) = bind2 (^)
|
||||
end
|
||||
|
||||
let mk_error ?(error_code : int option) ~(title : string) ?(message : string option) () =
|
||||
let collapse l =
|
||||
List.fold_left (fun acc -> function None -> acc | Some e -> e::acc) [] (List.rev l) in
|
||||
`Assoc
|
||||
(collapse
|
||||
[(match error_code with Some c -> Some ("error_code", `Int c) | None -> None);
|
||||
Some ("title", `String title);
|
||||
(match message with Some m -> Some ("message", `String m) | None -> None)])
|
||||
|
||||
|
||||
type error_thunk = unit -> error
|
||||
|
||||
type annotation = J.t (* feel free to add different annotations here. *)
|
||||
type annotation_thunk = unit -> annotation
|
||||
|
||||
type 'a result =
|
||||
Ok of 'a * annotation_thunk list
|
||||
| Errors of error_thunk list
|
||||
|
||||
let ok x = Ok (x, [])
|
||||
let fail err = Errors [err]
|
||||
|
||||
(* When passing a constant string where a thunk is expected, we wrap it with thunk, as follows:
|
||||
(thunk "some string")
|
||||
We always put the parentheses around the call, to increase grep and sed efficiency.
|
||||
|
||||
When a trace function is called, it is passed a `(fun () -> …)`.
|
||||
If the `…` is e.g. error then we write `(fun () -> error title msg ())`. *)
|
||||
let thunk x () = x
|
||||
|
||||
let error title message () = mk_error ~title:(title ()) ~message:(message ()) ()
|
||||
|
||||
let simple_error str () = mk_error ~title:str ()
|
||||
|
||||
let simple_fail str = fail @@ simple_error str
|
||||
|
||||
(* To be used when wrapped by a "trace_strong" for instance *)
|
||||
let dummy_fail = simple_fail "dummy"
|
||||
|
||||
let map f = function
|
||||
| Ok (x, annotations) ->
|
||||
(match f x with
|
||||
Ok (x', annotations') -> Ok (x', annotations' @ annotations)
|
||||
| Errors _ as e' -> ignore annotations; e')
|
||||
| Errors _ as e -> e
|
||||
|
||||
let apply f = function
|
||||
| Ok (x, annotations) -> Ok (f x, annotations)
|
||||
| Errors _ as e -> e
|
||||
|
||||
let (>>?) x f = map f x
|
||||
let (>>|?) = apply
|
||||
|
||||
module Let_syntax = struct
|
||||
let bind m ~f = m >>? f
|
||||
module Open_on_rhs_bind = struct end
|
||||
end
|
||||
|
||||
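(* Illustrative sketch, not part of the vendored source: with the generalized
   [ppx_let] above, [let%bind] can be used on ['a result] directly, since the
   rewriter targets [Let_syntax.bind] and opens [Open_on_rhs_bind] on the
   right-hand side.  Hand-expanded, such a use has the following shape. *)
let _bind_example (a, b) =
  Let_syntax.bind
    (if b = 0 then simple_fail "division by zero" else ok b)
    ~f:(fun d -> ok (a / d))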
let trace_strong err = function
|
||||
| Ok _ as o -> o
|
||||
| Errors _ -> Errors [err]
|
||||
|
||||
let trace err = function
|
||||
| Ok _ as o -> o
|
||||
| Errors errs -> Errors (err :: errs)
|
||||
|
||||
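(* Illustrative sketch, not part of the vendored source, of the thunk
   convention described above: the error stays a thunk inside [Errors _]
   and is only forced when the errors are eventually rendered. *)
let _trace_example (x : unit result) =
  trace (error (thunk "while doing something") (thunk "more context")) x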
let trace_r err_thunk_may_fail = function
|
||||
| Ok _ as o -> o
|
||||
| Errors errs ->
|
||||
match err_thunk_may_fail () with
|
||||
| Ok (err, annotations) -> ignore annotations; Errors (err :: errs)
|
||||
| Errors errors_while_generating_error ->
|
||||
(* TODO: the complexity could be O(n*n) in the worst case,
|
||||
this should use some catenable lists. *)
|
||||
Errors (errors_while_generating_error
|
||||
@ errs)
|
||||
|
||||
let trace_f f error x =
|
||||
trace error @@ f x
|
||||
|
||||
let trace_f_2 f error x y =
|
||||
trace error @@ f x y
|
||||
|
||||
let trace_f_ez f name =
|
||||
trace_f f (error (thunk "in function") name)
|
||||
|
||||
let trace_f_2_ez f name =
|
||||
trace_f_2 f (error (thunk "in function") name)
|
||||
|
||||
let to_bool = function
|
||||
| Ok _ -> true
|
||||
| Errors _ -> false
|
||||
|
||||
let to_option = function
|
||||
| Ok (o, annotations) -> ignore annotations; Some o
|
||||
| Errors _ -> None
|
||||
|
||||
let trace_option error = function
|
||||
| None -> fail error
|
||||
| Some s -> ok s
|
||||
|
||||
let bind_map_option f = function
|
||||
| None -> ok None
|
||||
| Some s -> f s >>? fun x -> ok (Some x)
|
||||
|
||||
let rec bind_list = function
|
||||
| [] -> ok []
|
||||
| hd :: tl -> (
|
||||
hd >>? fun hd ->
|
||||
bind_list tl >>? fun tl ->
|
||||
ok @@ hd :: tl
|
||||
)
|
||||
let bind_ne_list = fun (hd , tl) ->
|
||||
hd >>? fun hd ->
|
||||
bind_list tl >>? fun tl ->
|
||||
ok @@ (hd , tl)
|
||||
|
||||
let bind_smap (s:_ X_map.String.t) =
|
||||
let open X_map.String in
|
||||
let aux k v prev =
|
||||
prev >>? fun prev' ->
|
||||
v >>? fun v' ->
|
||||
ok @@ add k v' prev' in
|
||||
fold aux s (ok empty)
|
||||
|
||||
let bind_fold_smap f init (smap : _ X_map.String.t) =
|
||||
let aux k v prev =
|
||||
prev >>? fun prev' ->
|
||||
f prev' k v
|
||||
in
|
||||
X_map.String.fold aux smap init
|
||||
|
||||
let bind_map_smap f smap = bind_smap (X_map.String.map f smap)
|
||||
|
||||
let bind_map_list f lst = bind_list (List.map f lst)
|
||||
let bind_map_ne_list : _ -> 'a X_list.Ne.t -> 'b X_list.Ne.t result = fun f lst -> bind_ne_list (X_list.Ne.map f lst)
|
||||
let bind_iter_list : (_ -> unit result) -> _ list -> unit result = fun f lst ->
|
||||
bind_map_list f lst >>? fun _ -> ok ()
|
||||
|
||||
let bind_location (x:_ Location.wrap) =
|
||||
x.wrap_content >>? fun wrap_content ->
|
||||
ok { x with wrap_content }
|
||||
|
||||
let bind_map_location f x = bind_location (Location.map f x)
|
||||
|
||||
let bind_fold_list f init lst =
|
||||
let aux x y =
|
||||
x >>? fun x ->
|
||||
f x y
|
||||
in
|
||||
List.fold_left aux (ok init) lst
|
||||
|
||||
let bind_fold_map_list = fun f acc lst ->
|
||||
let rec aux (acc , prev) f = function
|
||||
| [] -> ok (acc , prev)
|
||||
| hd :: tl ->
|
||||
f acc hd >>? fun (acc' , hd') ->
|
||||
aux (acc' , hd' :: prev) f tl
|
||||
in
|
||||
aux (acc , []) f lst >>? fun (_acc' , lst') ->
|
||||
ok @@ List.rev lst'
|
||||
|
||||
let bind_fold_map_right_list = fun f acc lst ->
|
||||
let rec aux (acc , prev) f = function
|
||||
| [] -> ok (acc , prev)
|
||||
| hd :: tl ->
|
||||
f acc hd >>? fun (acc' , hd') ->
|
||||
aux (acc' , hd' :: prev) f tl
|
||||
in
|
||||
aux (acc , []) f (List.rev lst) >>? fun (_acc' , lst') ->
|
||||
ok lst'
|
||||
|
||||
let bind_fold_right_list f init lst =
|
||||
let aux x y =
|
||||
x >>? fun x ->
|
||||
f x y
|
||||
in
|
||||
X_list.fold_right' aux (ok init) lst
|
||||
|
||||
let bind_find_map_list error f lst =
|
||||
let rec aux lst =
|
||||
match lst with
|
||||
| [] -> fail error
|
||||
| hd :: tl -> (
|
||||
match f hd with
|
||||
| Errors _ -> aux tl
|
||||
| o -> o
|
||||
)
|
||||
in
|
||||
aux lst
|
||||
|
||||
let bind_list_iter f lst =
|
||||
let aux () y = f y in
|
||||
bind_fold_list aux () lst
|
||||
|
||||
let bind_or (a, b) =
|
||||
match a with
|
||||
| Ok _ as o -> o
|
||||
| _ -> b
|
||||
|
||||
let bind_lr (type a b) ((a : a result), (b:b result)) : [`Left of a | `Right of b] result =
|
||||
match (a, b) with
|
||||
| (Ok _ as o), _ -> apply (fun x -> `Left x) o
|
||||
| _, (Ok _ as o) -> apply (fun x -> `Right x) o
|
||||
| _, Errors b -> Errors b
|
||||
|
||||
let bind_lr_lazy (type a b) ((a : a result), (b:unit -> b result)) : [`Left of a | `Right of b] result =
|
||||
match a with
|
||||
| Ok _ as o -> apply (fun x -> `Left x) o
|
||||
| _ -> (
|
||||
match b() with
|
||||
| Ok _ as o -> apply (fun x -> `Right x) o
|
||||
| Errors b -> Errors b
|
||||
)
|
||||
|
||||
let bind_and (a, b) =
|
||||
a >>? fun a ->
|
||||
b >>? fun b ->
|
||||
ok (a, b)
|
||||
|
||||
let bind_pair = bind_and
|
||||
let bind_map_pair f (a, b) =
|
||||
bind_pair (f a, f b)
|
||||
|
||||
let generic_try err f =
|
||||
try (
|
||||
ok @@ f ()
|
||||
) with _ -> fail err
|
||||
|
||||
let specific_try handler f =
|
||||
try (
|
||||
ok @@ f ()
|
||||
) with exn -> fail ((handler ()) exn)
|
||||
|
||||
let sys_try f =
|
||||
let handler () = function
|
||||
| Sys_error str -> error (thunk "Sys_error") (fun () -> str)
|
||||
| exn -> raise exn
|
||||
in
|
||||
specific_try handler f
|
||||
|
||||
let sys_command command =
|
||||
sys_try (fun () -> Sys.command command) >>? function
|
||||
| 0 -> ok ()
|
||||
| n -> fail (fun () -> error (thunk "Nonzero return code") (fun () -> (string_of_int n)) ())
|
||||
|
||||
let trace_sequence f lst =
|
||||
let lazy_map_force : 'a . (unit -> 'a) list -> (unit -> 'a list) = fun l ->
|
||||
fun () ->
|
||||
List.rev @@ List.rev_map (fun a -> a ()) l in
|
||||
let rec aux acc_x acc_annotations = function
|
||||
| hd :: tl -> (
|
||||
match f hd with
|
||||
(* TODO: what should we do with the annotations? *)
|
||||
| Ok (x, annotations) -> aux (x :: acc_x) (lazy_map_force annotations :: acc_annotations) tl
|
||||
| Errors _ as errs -> errs
|
||||
)
|
||||
| [] ->
|
||||
let old_annotations () = List.map (fun a -> `List (a ())) @@ List.rev acc_annotations in
|
||||
(* Builds a JSON annotation { "type": "list"; "content": [[…], …] } *)
|
||||
let annotation = fun () -> `Assoc [("type", `String "list"); ("content", `List (old_annotations ()))]
|
||||
in Ok (List.rev acc_x, [annotation]) in
|
||||
aux [] lst
|
||||
|
||||
let json_of_error = J.to_string
|
||||
let error_pp out (e : error) =
|
||||
let open JSON_string_utils in
|
||||
let e : J.t = (match e with `Assoc _ as e -> e) in
|
||||
let message = e |> member "message" |> string in
|
||||
let title = e |> member "title" |> string || "(no title)" in
|
||||
let error_code = unit " " |^ (e |> member "error_code" |> int |> string_of_int) || "" in
|
||||
Format.fprintf out "%s" (error_code ^ ": " ^ title ^ (unit ":" |^ message || ""))
|
||||
|
||||
let error_pp_short out (e : error) =
|
||||
let open JSON_string_utils in
|
||||
let e : J.t = (match e with `Assoc _ as e -> e) in
|
||||
let title = e |> member "title" |> string || "(no title)" in
|
||||
let error_code = unit " " |^ (e |> member "error_code" |> int |> string_of_int) || "" in
|
||||
Format.fprintf out "%s" (error_code ^ ": " ^ title)
|
||||
|
||||
let errors_pp =
|
||||
Format.pp_print_list
|
||||
~pp_sep:Format.pp_print_newline
|
||||
error_pp
|
||||
|
||||
let errors_pp_short =
|
||||
Format.pp_print_list
|
||||
~pp_sep:Format.pp_print_newline
|
||||
error_pp_short
|
||||
|
||||
let pp_to_string pp () x =
|
||||
Format.fprintf Format.str_formatter "%a" pp x ;
|
||||
Format.flush_str_formatter ()
|
||||
|
||||
let errors_to_string = pp_to_string errors_pp
|
||||
|
||||
module Assert = struct
|
||||
let assert_fail ?(msg="didn't fail") = function
|
||||
| Ok _ -> simple_fail msg
|
||||
| _ -> ok ()
|
||||
|
||||
let assert_true ?(msg="not true") = function
|
||||
| true -> ok ()
|
||||
| false -> simple_fail msg
|
||||
|
||||
let assert_equal ?msg expected actual =
|
||||
assert_true ?msg (expected = actual)
|
||||
|
||||
let assert_equal_int ?msg expected actual =
|
||||
let msg =
|
||||
let default = Format.asprintf "Not equal int : expected %d, got %d" expected actual in
|
||||
X_option.unopt ~default msg in
|
||||
assert_equal ~msg expected actual
|
||||
|
||||
let assert_equal_bool ?msg expected actual =
|
||||
let msg =
|
||||
let default = Format.asprintf "Not equal bool : expected %b, got %b" expected actual in
|
||||
X_option.unopt ~default msg in
|
||||
assert_equal ~msg expected actual
|
||||
|
||||
let assert_none ?(msg="not a none") opt = match opt with
|
||||
| None -> ok ()
|
||||
| _ -> simple_fail msg
|
||||
|
||||
let assert_list_size ?(msg="lst doesn't have the right size") lst n =
|
||||
assert_true ~msg List.(length lst = n)
|
||||
|
||||
let assert_list_empty ?(msg="lst isn't empty") lst =
|
||||
assert_true ~msg List.(length lst = 0)
|
||||
|
||||
let assert_list_same_size ?(msg="lists don't have same size") a b =
|
||||
assert_true ~msg List.(length a = length b)
|
||||
|
||||
let assert_list_size_2 ~msg = function
|
||||
| [a;b] -> ok (a, b)
|
||||
| _ -> simple_fail msg
|
||||
|
||||
let assert_list_size_1 ~msg = function
|
||||
| [a] -> ok a
|
||||
| _ -> simple_fail msg
|
||||
end
|
130
vendors/ligo-utils/simple-utils/tree.ml
vendored
Normal file
130
vendors/ligo-utils/simple-utils/tree.ml
vendored
Normal file
@ -0,0 +1,130 @@
|
||||
[@@@warning "-9"]
|
||||
|
||||
module Append = struct
|
||||
type 'a t' =
|
||||
| Leaf of 'a
|
||||
| Node of {
|
||||
a : 'a t' ;
|
||||
b : 'a t' ;
|
||||
size : int ;
|
||||
full : bool ;
|
||||
}
|
||||
|
||||
type 'a t =
|
||||
| Empty
|
||||
| Full of 'a t'
|
||||
|
||||
let node (a, b, size, full) = Node {a;b;size;full}
|
||||
|
||||
let rec exists' f = function
|
||||
| Leaf s' when f s' -> true
|
||||
| Leaf _ -> false
|
||||
| Node{a;b} -> exists' f a || exists' f b
|
||||
let exists f = function
|
||||
| Empty -> false
|
||||
| Full x -> exists' f x
|
||||
|
||||
let rec exists_path' f = function
|
||||
| Leaf x -> if f x then Some [] else None
|
||||
| Node {a;b} -> (
|
||||
match exists_path' f a with
|
||||
| Some a -> Some (false :: a)
|
||||
| None -> (
|
||||
match exists_path' f b with
|
||||
| Some b -> Some (true :: b)
|
||||
| None -> None
|
||||
)
|
||||
)
|
||||
|
||||
let exists_path f = function
|
||||
| Empty -> None
|
||||
| Full x -> exists_path' f x
|
||||
|
||||
let empty : 'a t = Empty
|
||||
|
||||
let size' = function
|
||||
| Leaf _ -> 1
|
||||
| Node {size} -> size
|
||||
|
||||
let size = function
|
||||
| Empty -> 0
|
||||
| Full x -> size' x
|
||||
|
||||
let rec append' x = function
|
||||
| Leaf e -> node (Leaf e, Leaf x, 1, true)
|
||||
| Node({full=true;size}) as n -> node(n, Leaf x, size + 1, false)
|
||||
| Node({a=Node a;b;full=false} as n) -> (
|
||||
match append' x b with
|
||||
| Node{full=false} as b -> Node{n with b}
|
||||
| Node({full=true} as b) -> Node{n with b = Node b ; full = b.size = a.size}
|
||||
| Leaf _ -> assert false
|
||||
)
|
||||
| Node{a=Leaf _;full=false} -> assert false
|
||||
|
||||
let append x = function
|
||||
| Empty -> Full (Leaf x)
|
||||
| Full t -> Full (append' x t)
|
||||
|
||||
let of_list lst =
|
||||
let rec aux = function
|
||||
| [] -> Empty
|
||||
| hd :: tl -> append hd (aux tl)
|
||||
in
|
||||
aux @@ List.rev lst
|
||||
|
||||
let rec to_list' t' =
|
||||
match t' with
|
||||
| Leaf x -> [x]
|
||||
| Node {a;b} -> (to_list' a) @ (to_list' b)
|
||||
|
||||
let to_list t =
|
||||
match t with
|
||||
| Empty -> []
|
||||
| Full x -> to_list' x
|
||||
|
||||
let rec fold' leaf node = function
|
||||
| Leaf x -> leaf x
|
||||
| Node {a;b} -> node (fold' leaf node a) (fold' leaf node b)
|
||||
|
||||
let rec fold_s' : type a b . a -> (a -> b -> a) -> b t' -> a = fun init leaf -> function
|
||||
| Leaf x -> leaf init x
|
||||
| Node {a;b} -> fold_s' (fold_s' init leaf a) leaf b
|
||||
|
||||
let fold_ne leaf node = function
|
||||
| Empty -> raise (Failure "Tree.Append.fold_ne")
|
||||
| Full x -> fold' leaf node x
|
||||
|
||||
let fold_s_ne : type a b . a -> (a -> b -> a) -> b t -> a = fun init leaf -> function
|
||||
| Empty -> raise (Failure "Tree.Append.fold_s_ne")
|
||||
| Full x -> fold_s' init leaf x
|
||||
|
||||
let fold empty leaf node = function
|
||||
| Empty -> empty
|
||||
| Full x -> fold' leaf node x
|
||||
|
||||
|
||||
let rec assoc_opt' : ('a * 'b) t' -> 'a -> 'b option = fun t k ->
|
||||
match t with
|
||||
| Leaf (k', v) when k = k' -> Some v
|
||||
| Leaf _ -> None
|
||||
| Node {a;b} -> (
|
||||
match assoc_opt' a k with
|
||||
| None -> assoc_opt' b k
|
||||
| Some v -> Some v
|
||||
)
|
||||
|
||||
let assoc_opt : ('a * 'b) t -> 'a -> 'b option = fun t k ->
|
||||
match t with
|
||||
| Empty -> None
|
||||
| Full t' -> assoc_opt' t' k
|
||||
|
||||
let rec pp' : _ -> _ -> 'a t' -> unit = fun f ppf t' ->
|
||||
match t' with
|
||||
| Leaf x -> Format.fprintf ppf "%a" f x
|
||||
| Node {a;b} -> Format.fprintf ppf "N(%a , %a)" (pp' f) a (pp' f) b
|
||||
|
||||
let pp : _ -> _ -> 'a t -> unit = fun f ppf t ->
|
||||
match t with
|
||||
| Empty -> Format.fprintf ppf "[]"
|
||||
| Full x -> Format.fprintf ppf "[%a]" (pp' f) x
|
||||
end
|
9
vendors/ligo-utils/simple-utils/tuple.ml
vendored
Normal file
9
vendors/ligo-utils/simple-utils/tuple.ml
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
let map_h_2 f g (a , b) = (f a , g b)
|
||||
let map2 f (a, b) = (f a, f b)
|
||||
let apply2 f (a, b) = f a b
|
||||
let list2 (a, b) = [a;b]
|
||||
|
||||
module Pair = struct
|
||||
let map = map2
|
||||
let apply f (a, b) = f a b
|
||||
end
|
21
vendors/ligo-utils/simple-utils/wrap.ml
vendored
Normal file
21
vendors/ligo-utils/simple-utils/wrap.ml
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
module Make (P : sig type meta end) = struct
|
||||
type meta = P.meta
|
||||
type 'value t = {
|
||||
value : 'value ;
|
||||
meta : meta ;
|
||||
}
|
||||
|
||||
let make meta value = { value ; meta }
|
||||
let value t = t.value
|
||||
let meta t = t.meta
|
||||
|
||||
let apply : ('a -> 'b) -> 'a t -> 'b = fun f x -> f x.value
|
||||
end
|
||||
|
||||
module Location = struct
|
||||
include Make(struct type meta = Location.t end)
|
||||
|
||||
let make_f f : loc:_ -> _ -> _ t = fun ~loc x -> make loc (f x)
|
||||
let make ~loc x : _ t = make loc x
|
||||
let update_location ~loc t = {t with meta = loc}
|
||||
end
|
165
vendors/ligo-utils/simple-utils/x_list.ml
vendored
Normal file
165
vendors/ligo-utils/simple-utils/x_list.ml
vendored
Normal file
@ -0,0 +1,165 @@
|
||||
include List
|
||||
|
||||
let rec remove n = function
|
||||
| [] -> raise (Failure "List.remove")
|
||||
| _ :: tl when n = 0 -> tl
|
||||
| hd :: tl -> hd :: remove (n - 1) tl
|
||||
|
||||
|
||||
let map ?(acc = []) f lst =
|
||||
let rec aux acc f = function
|
||||
| [] -> acc
|
||||
| hd :: tl -> aux (f hd :: acc) f tl
|
||||
in
|
||||
aux acc f (List.rev lst)
|
||||
|
||||
let fold_map_right : type acc ele ret . (acc -> ele -> (acc * ret)) -> acc -> ele list -> ret list =
|
||||
fun f acc lst ->
|
||||
let rec aux (acc , prev) f = function
|
||||
| [] -> (acc , prev)
|
||||
| hd :: tl ->
|
||||
let (acc' , hd') = f acc hd in
|
||||
aux (acc' , hd' :: prev) f tl
|
||||
in
|
||||
snd @@ aux (acc , []) f (List.rev lst)
|
||||
|
||||
let fold_map : type acc ele ret . (acc -> ele -> (acc * ret)) -> acc -> ele list -> ret list =
|
||||
fun f acc lst ->
|
||||
let rec aux (acc , prev) f = function
|
||||
| [] -> (acc , prev)
|
||||
| hd :: tl ->
|
||||
let (acc' , hd') = f acc hd in
|
||||
aux (acc' , hd' :: prev) f tl
|
||||
in
|
||||
List.rev @@ snd @@ aux (acc , []) f lst
|
||||
|
||||
let fold_right' f init lst = List.fold_left f init (List.rev lst)
|
||||
|
||||
let rec remove_element x lst =
|
||||
match lst with
|
||||
| [] -> raise (Failure "X_list.remove_element")
|
||||
| hd :: tl when x = hd -> tl
|
||||
| hd :: tl -> hd :: remove_element x tl
|
||||
|
||||
let filter_map f =
|
||||
let rec aux acc lst = match lst with
|
||||
| [] -> List.rev acc
|
||||
| hd :: tl -> aux (
|
||||
match f hd with
|
||||
| Some x -> x :: acc
|
||||
| None -> acc
|
||||
) tl
|
||||
in
|
||||
aux []
|
||||
|
||||
let cons_iter = fun fhd ftl lst ->
|
||||
match lst with
|
||||
| [] -> ()
|
||||
| hd :: tl -> fhd hd ; List.iter ftl tl
|
||||
|
||||
let range n =
|
||||
let rec aux acc n =
|
||||
if n = 0
|
||||
then acc
|
||||
else aux ((n-1) :: acc) (n-1)
|
||||
in
|
||||
aux [] n
|
||||
|
||||
let find_map f lst =
|
||||
let rec aux = function
|
||||
| [] -> None
|
||||
| hd::tl -> (
|
||||
match f hd with
|
||||
| Some _ as s -> s
|
||||
| None -> aux tl
|
||||
)
|
||||
in
|
||||
aux lst
|
||||
|
||||
let find_index f lst =
|
||||
let rec aux n = function
|
||||
| [] -> raise (Failure "find_index")
|
||||
| hd :: _ when f hd -> n
|
||||
| _ :: tl -> aux (n + 1) tl in
|
||||
aux 0 lst
|
||||
|
||||
let find_full f lst =
|
||||
let rec aux n = function
|
||||
| [] -> raise (Failure "find_index")
|
||||
| hd :: _ when f hd -> (hd, n)
|
||||
| _ :: tl -> aux (n + 1) tl in
|
||||
aux 0 lst
|
||||
|
||||
let assoc_i x lst =
|
||||
let rec aux n = function
|
||||
| [] -> raise (Failure "List:assoc_i")
|
||||
| (x', y) :: _ when x = x' -> (y, n)
|
||||
| _ :: tl -> aux (n + 1) tl
|
||||
in
|
||||
aux 0 lst
|
||||
|
||||
let rec from n lst =
|
||||
if n = 0
|
||||
then lst
|
||||
else from (n - 1) (tl lst)
|
||||
|
||||
let until n lst =
|
||||
let rec aux acc n lst =
|
||||
if n = 0
|
||||
then acc
|
||||
else aux ((hd lst) :: acc) (n - 1) (tl lst)
|
||||
in
|
||||
rev (aux [] n lst)
|
||||
|
||||
let uncons_opt = function
|
||||
| [] -> None
|
||||
| hd :: tl -> Some (hd, tl)
|
||||
|
||||
let rev_uncons_opt = function
|
||||
| [] -> None
|
||||
| lst ->
|
||||
let r = rev lst in
|
||||
let last = hd r in
|
||||
let hds = rev @@ tl r in
|
||||
Some (hds , last)
|
||||
|
||||
let hds lst = match rev_uncons_opt lst with
|
||||
| None -> failwith "toto"
|
||||
| Some (hds , _) -> hds
|
||||
|
||||
let to_pair = function
|
||||
| [a ; b] -> Some (a , b)
|
||||
| _ -> None
|
||||
|
||||
let to_singleton = function
|
||||
| [a] -> Some a
|
||||
| _ -> None
|
||||
|
||||
module Ne = struct
|
||||
|
||||
type 'a t = 'a * 'a list
|
||||
|
||||
let of_list lst = List.(hd lst, tl lst)
|
||||
let to_list (hd, tl : _ t) = hd :: tl
|
||||
let singleton hd : 'a t = hd , []
|
||||
let hd : 'a t -> 'a = fst
|
||||
let cons : 'a -> 'a t -> 'a t = fun hd' (hd , tl) -> hd' , hd :: tl
|
||||
let iter f (hd, tl : _ t) = f hd ; List.iter f tl
|
||||
let map f (hd, tl : _ t) = f hd, List.map f tl
|
||||
let hd_map : _ -> 'a t -> 'a t = fun f (hd , tl) -> (f hd , tl)
|
||||
let mapi f (hd, tl : _ t) =
|
||||
let lst = List.mapi f (hd::tl) in
|
||||
of_list lst
|
||||
let concat (hd, tl : _ t) = hd @ List.concat tl
|
||||
let rev (hd, tl : _ t) =
|
||||
match tl with
|
||||
| [] -> (hd, [])
|
||||
| lst ->
|
||||
let r = List.rev lst in
|
||||
(List.hd r, List.tl r @ [hd])
|
||||
let find_map = fun f (hd , tl : _ t) ->
|
||||
match f hd with
|
||||
| Some x -> Some x
|
||||
| None -> find_map f tl
|
||||
|
||||
end
|
27
vendors/ligo-utils/simple-utils/x_map.ml
vendored
Normal file
27
vendors/ligo-utils/simple-utils/x_map.ml
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
module type OrderedType = Map.OrderedType
|
||||
|
||||
module type S = sig
|
||||
include Map.S
|
||||
|
||||
val of_list : (key * 'a) list -> 'a t
|
||||
val to_list : 'a t -> 'a list
|
||||
val to_kv_list : 'a t -> (key * 'a) list
|
||||
end
|
||||
|
||||
module Make(Ord : Map.OrderedType) : S with type key = Ord.t = struct
|
||||
include Map.Make(Ord)
|
||||
|
||||
let of_list (lst: (key * 'a) list) : 'a t =
|
||||
let aux prev (k, v) = add k v prev in
|
||||
List.fold_left aux empty lst
|
||||
|
||||
let to_list (t: 'a t) : 'a list =
|
||||
let aux _k v prev = v :: prev in
|
||||
fold aux t []
|
||||
|
||||
let to_kv_list (t: 'a t) : (key * 'a) list =
|
||||
let aux k v prev = (k, v) :: prev in
|
||||
fold aux t []
|
||||
end
|
||||
|
||||
module String = Make(String)
|
59
vendors/ligo-utils/simple-utils/x_option.ml
vendored
Normal file
59
vendors/ligo-utils/simple-utils/x_option.ml
vendored
Normal file
@ -0,0 +1,59 @@
|
||||
let (>>=) x f = match x with
|
||||
| None -> None
|
||||
| Some x -> f x
|
||||
|
||||
let first_some = fun a b -> match (a , b) with
|
||||
| Some a , _ -> Some a
|
||||
| _ , Some b -> Some b
|
||||
| _ -> None
|
||||
|
||||
let unopt ~default x = match x with
|
||||
| None -> default
|
||||
| Some x -> x
|
||||
|
||||
let unopt_exn x = match x with
|
||||
| None -> raise Not_found
|
||||
| Some x -> x
|
||||
|
||||
let map ~f x = match x with
|
||||
| Some x -> Some (f x)
|
||||
| None -> None
|
||||
|
||||
let lr (a , b) = match (a , b) with
|
||||
| Some x , _ -> Some (`Left x)
|
||||
| None , Some x -> Some (`Right x)
|
||||
| _ -> None
|
||||
|
||||
(* TODO: recursive terminal *)
|
||||
let rec bind_list = fun lst ->
|
||||
match lst with
|
||||
| [] -> Some []
|
||||
| hd :: tl -> (
|
||||
match hd with
|
||||
| None -> None
|
||||
| Some hd' -> (
|
||||
match bind_list tl with
|
||||
| None -> None
|
||||
| Some tl' -> Some (hd' :: tl')
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
let bind_pair = fun (a , b) ->
|
||||
a >>= fun a' ->
|
||||
b >>= fun b' ->
|
||||
Some (a' , b')
|
||||
|
||||
let bind_map_list = fun f lst -> bind_list (X_list.map f lst)
|
||||
|
||||
let bind_map_pair = fun f (a , b) -> bind_pair (f a , f b)
|
||||
|
||||
let bind_smap (s:_ X_map.String.t) =
|
||||
let open X_map.String in
|
||||
let aux k v prev =
|
||||
prev >>= fun prev' ->
|
||||
v >>= fun v' ->
|
||||
Some (add k v' prev') in
|
||||
fold aux s (Some empty)
|
||||
|
||||
let bind_map_smap f smap = bind_smap (X_map.String.map f smap)
|
12
vendors/ligo-utils/tezos-utils/dune
vendored
Normal file
12
vendors/ligo-utils/tezos-utils/dune
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
(library
|
||||
(name tezos_utils)
|
||||
(public_name tezos-utils)
|
||||
(libraries
|
||||
tezos-error-monad
|
||||
tezos-stdlib-unix
|
||||
tezos-memory-proto-alpha
|
||||
simple-utils
|
||||
michelson-parser
|
||||
)
|
||||
(flags (:standard -open Simple_utils ))
|
||||
)
|
1
vendors/ligo-utils/tezos-utils/dune-project
vendored
Normal file
1
vendors/ligo-utils/tezos-utils/dune-project
vendored
Normal file
@ -0,0 +1 @@
|
||||
(lang dune 1.6)
|
10
vendors/ligo-utils/tezos-utils/michelson-parser/dune
vendored
Normal file
10
vendors/ligo-utils/tezos-utils/michelson-parser/dune
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
(library
|
||||
(name michelson_parser)
|
||||
(public_name michelson-parser)
|
||||
(libraries
|
||||
tezos-base
|
||||
tezos-memory-proto-alpha
|
||||
)
|
||||
(flags (:standard -w -9-32 -safe-string
|
||||
-open Tezos_base__TzPervasives
|
||||
)))
|
1
vendors/ligo-utils/tezos-utils/michelson-parser/dune-project
vendored
Normal file
1
vendors/ligo-utils/tezos-utils/michelson-parser/dune-project
vendored
Normal file
@ -0,0 +1 @@
|
||||
(lang dune 1.6)
|
16
vendors/ligo-utils/tezos-utils/michelson-parser/michelson-parser.opam
vendored
Normal file
16
vendors/ligo-utils/tezos-utils/michelson-parser/michelson-parser.opam
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
name: "michelson-parser"
|
||||
opam-version: "2.0"
|
||||
maintainer: "ligolang@gmail.com"
|
||||
authors: [ "Galfour" ]
|
||||
homepage: "https://gitlab.com/ligolang/tezos"
|
||||
bug-reports: "https://gitlab.com/ligolang/tezos/issues"
|
||||
dev-repo: "git+https://gitlab.com/ligolang/tezos.git"
|
||||
license: "MIT"
|
||||
depends: [
|
||||
"ocamlfind" { build }
|
||||
"dune"
|
||||
"tezos-memory-proto-alpha"
|
||||
]
|
||||
build: [
|
||||
[ "dune" "build" "-p" name "-j" jobs ]
|
||||
]
|
1176
vendors/ligo-utils/tezos-utils/michelson-parser/michelson_v1_macros.ml
vendored
Normal file
1176
vendors/ligo-utils/tezos-utils/michelson-parser/michelson_v1_macros.ml
vendored
Normal file
File diff suppressed because it is too large
62
vendors/ligo-utils/tezos-utils/michelson-parser/michelson_v1_macros.mli
vendored
Normal file
62
vendors/ligo-utils/tezos-utils/michelson-parser/michelson_v1_macros.mli
vendored
Normal file
@ -0,0 +1,62 @@
|
||||
(*****************************************************************************)
|
||||
(* *)
|
||||
(* Open Source License *)
|
||||
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *)
|
||||
(* *)
|
||||
(* Permission is hereby granted, free of charge, to any person obtaining a *)
|
||||
(* copy of this software and associated documentation files (the "Software"),*)
|
||||
(* to deal in the Software without restriction, including without limitation *)
|
||||
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
|
||||
(* and/or sell copies of the Software, and to permit persons to whom the *)
|
||||
(* Software is furnished to do so, subject to the following conditions: *)
|
||||
(* *)
|
||||
(* The above copyright notice and this permission notice shall be included *)
|
||||
(* in all copies or substantial portions of the Software. *)
|
||||
(* *)
|
||||
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
|
||||
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
|
||||
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
|
||||
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
|
||||
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
|
||||
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
|
||||
(* DEALINGS IN THE SOFTWARE. *)
|
||||
(* *)
|
||||
(*****************************************************************************)
|
||||
|
||||
open Tezos_micheline
|
||||
|
||||
type 'l node = ('l, string) Micheline.node
|
||||
|
||||
type error += Unexpected_macro_annotation of string
|
||||
type error += Sequence_expected of string
|
||||
type error += Invalid_arity of string * int * int
|
||||
|
||||
val expand : 'l node -> 'l node tzresult
|
||||
val expand_rec : 'l node -> 'l node * error list
|
||||
|
||||
val expand_caddadr : 'l node -> 'l node option tzresult
|
||||
val expand_set_caddadr : 'l node -> 'l node option tzresult
|
||||
val expand_map_caddadr : 'l node -> 'l node option tzresult
|
||||
val expand_dxiiivp : 'l node -> 'l node option tzresult
|
||||
val expand_pappaiir : 'l node -> 'l node option tzresult
|
||||
val expand_duuuuup : 'l node -> 'l node option tzresult
|
||||
val expand_compare : 'l node -> 'l node option tzresult
|
||||
val expand_asserts : 'l node -> 'l node option tzresult
|
||||
val expand_unpappaiir : 'l node -> 'l node option tzresult
|
||||
val expand_if_some : 'l node -> 'l node option tzresult
|
||||
val expand_if_right : 'l node -> 'l node option tzresult
|
||||
|
||||
val unexpand : 'l node -> 'l node
|
||||
val unexpand_rec : 'l node -> 'l node
|
||||
|
||||
val unexpand_caddadr : 'l node -> 'l node option
|
||||
val unexpand_set_caddadr : 'l node -> 'l node option
|
||||
val unexpand_map_caddadr : 'l node -> 'l node option
|
||||
val unexpand_dxiiivp : 'l node -> 'l node option
|
||||
val unexpand_pappaiir : 'l node -> 'l node option
|
||||
val unexpand_duuuuup : 'l node -> 'l node option
|
||||
val unexpand_compare : 'l node -> 'l node option
|
||||
val unexpand_asserts : 'l node -> 'l node option
|
||||
val unexpand_unpappaiir : 'l node -> 'l node option
|
||||
val unexpand_if_some : 'l node -> 'l node option
|
||||
val unexpand_if_right : 'l node -> 'l node option
|
91
vendors/ligo-utils/tezos-utils/michelson-parser/v1.ml
vendored
Normal file
91
vendors/ligo-utils/tezos-utils/michelson-parser/v1.ml
vendored
Normal file
@ -0,0 +1,91 @@
(*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)

open Memory_proto_alpha
open Tezos_micheline
open Micheline_parser
open Micheline

type parsed =
  { source : string ;
    unexpanded : string canonical ;
    expanded : Michelson_v1_primitives.prim canonical ;
    expansion_table : (int * (Micheline_parser.location * int list)) list ;
    unexpansion_table : (int * int) list }

(* Unexpanded toplevel expression should be a sequence *)

let expand_all source ast errors =
  let unexpanded, loc_table =
    extract_locations ast in
  let expanded, expansion_errors =
    Michelson_v1_macros.expand_rec (root unexpanded) in
  let expanded, unexpansion_table =
    extract_locations expanded in
  let expansion_table =
    let sorted =
      List.sort (fun (_, a) (_, b) -> compare a b) unexpansion_table in
    let grouped =
      let rec group = function
        | acc, [] -> acc
        | [], (u, e) :: r ->
            group ([ (e, [ u ]) ], r)
        | ((pe, us) :: racc as acc), (u, e) :: r ->
            if e = pe then
              group (((e, u :: us) :: racc), r)
            else
              group (((e, [ u ]) :: acc), r) in
      group ([], sorted) in
    List.map2
      (fun (l, ploc) (l', elocs) ->
         assert (l = l') ;
         (l, (ploc, elocs)))
      (List.sort compare loc_table)
      (List.sort compare grouped) in
  match Alpha_environment.wrap_error (Michelson_v1_primitives.prims_of_strings expanded) with
  | Ok expanded ->
      { source ; unexpanded ; expanded ;
        expansion_table ; unexpansion_table },
      errors @ expansion_errors
  | Error errs ->
      { source ; unexpanded ;
        expanded = Micheline.strip_locations (Seq ((), [])) ;
        expansion_table ; unexpansion_table },
      errors @ expansion_errors @ errs

let parse_toplevel ?check source =
  let tokens, lexing_errors = Micheline_parser.tokenize source in
  let asts, parsing_errors = Micheline_parser.parse_toplevel ?check tokens in
  let ast =
    let start = min_point asts and stop = max_point asts in
    Seq ({ start ; stop }, asts) in
  expand_all source ast (lexing_errors @ parsing_errors)

let parse_expression ?check source =
  let tokens, lexing_errors = Micheline_parser.tokenize source in
  let ast, parsing_errors = Micheline_parser.parse_expression ?check tokens in
  expand_all source ast (lexing_errors @ parsing_errors)

let expand_all ~source ~original =
  expand_all source original []
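A brief usage sketch, not part of the vendored file: driving the parser defined above. It assumes the library is built and the module is visible as Michelson_parser.V1 (record fields are accessed through that path).

(* Illustrative only: parse_expression tokenizes, parses and macro-expands in
   one pass; the result pairs the parsed record with any accumulated errors. *)
let () =
  let parsed, errors =
    Michelson_parser.V1.parse_expression ~check:false "PAIR" in
  match errors with
  | [] -> print_endline parsed.Michelson_parser.V1.source
  | _ -> prerr_endline "parse or expansion errors"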
51
vendors/ligo-utils/tezos-utils/michelson-parser/v1.mli
vendored
Normal file
@ -0,0 +1,51 @@
(*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)

open Memory_proto_alpha
open Alpha_context

open Tezos_micheline

(** The result of parsing and expanding a Michelson V1 script or data. *)
type parsed =
  {
    source : string ;
    (** The original source code. *)
    unexpanded : string Micheline.canonical ;
    (** Original expression with macros. *)
    expanded : Script.expr ;
    (** Expression with macros fully expanded. *)
    expansion_table :
      (int * (Micheline_parser.location * int list)) list ;
    (** Associates unexpanded nodes with their parsing locations and
        the nodes expanded from them in the expanded expression. *)
    unexpansion_table : (int * int) list ;
    (** Associates an expanded node with its source in the unexpanded
        expression. *)
  }

val parse_toplevel : ?check:bool -> string -> parsed Micheline_parser.parsing_result
val parse_expression : ?check:bool -> string -> parsed Micheline_parser.parsing_result
val expand_all : source:string -> original:Micheline_parser.node -> parsed Micheline_parser.parsing_result
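To make the two tables concrete, here is a small sketch (illustrative only, not part of the vendored interface; the module path Michelson_parser.V1 and the helper name are assumptions): mapping an expanded node index back to its source location via unexpansion_table and expansion_table.

(* Hypothetical helper: given the index of a node in the expanded expression,
   recover the parsing location of the unexpanded node it came from. *)
let source_location_of_expanded (p : Michelson_parser.V1.parsed) (expanded_index : int) =
  try
    (* unexpansion_table: expanded node index -> unexpanded node index. *)
    let unexpanded_index =
      List.assoc expanded_index p.Michelson_parser.V1.unexpansion_table in
    (* expansion_table: unexpanded node index -> (source location, expanded indices). *)
    let (loc, _expanded_nodes) =
      List.assoc unexpanded_index p.Michelson_parser.V1.expansion_table in
    Some loc
  with Not_found -> None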
51
vendors/ligo-utils/tezos-utils/tezos-utils.opam
vendored
Normal file
@ -0,0 +1,51 @@
opam-version: "2.0"
name: "tezos-utils"
version: "dev"
synopsis: "LIGO Tezos specific Utilities, to be used by other libraries"
maintainer: "Galfour <ligolang@gmail.com>"
authors: "Galfour <ligolang@gmail.com>"
license: "MIT"
homepage: "https://gitlab.com/ligolang/ligo-utils"
bug-reports: "https://gitlab.com/ligolang/ligo-utils/issues"
depends: [
  "dune"
  "base"
  "bigstring"
  "calendar"
  "cohttp-lwt-unix"
  "cstruct"
  "ezjsonm"
  "hex"
  "hidapi"
  "ipaddr"
  "irmin"
  "js_of_ocaml"
  "lwt"
  "lwt_log"
  "mtime"
  "ocplib-endian"
  "ocp-ocamlres"
  "re"
  "rresult"
  "stdio"
  "uri"
  "uutf"
  "zarith"
  "ocplib-json-typed"
  "ocplib-json-typed-bson"
  "tezos-crypto"
  "tezos-stdlib-unix"
  "tezos-data-encoding"
  "tezos-protocol-environment"
  "tezos-protocol-alpha"
  "michelson-parser"
  "simple-utils"
  # from ppx_let:
  "ocaml" {>= "4.04.2" & < "4.08.0"}
  "dune" {build & >= "1.5.1"}
  "ppxlib" {>= "0.5.0"}
]
build: [
  ["dune" "build" "-p" name]
]
8
vendors/ligo-utils/tezos-utils/tezos_utils.ml
vendored
Normal file
@ -0,0 +1,8 @@
module Stdlib_unix = Tezos_stdlib_unix
module Data_encoding = Tezos_data_encoding
module Crypto = Tezos_crypto
module Signature = Tezos_base.TzPervasives.Signature
module Time = Tezos_base.TzPervasives.Time
module Micheline = Tezos_micheline
module Michelson = X_michelson
module Error_monad = X_error_monad
25
vendors/ligo-utils/tezos-utils/x_error_monad.ml
vendored
Normal file
@ -0,0 +1,25 @@
module Error_monad = Tezos_error_monad.Error_monad

let to_string err =
  let json = Error_monad.json_of_error err in
  Tezos_data_encoding.Json.to_string json

let print err =
  Format.printf "%s\n" @@ to_string err

let force_ok ?(msg = "") = function
  | Ok x -> x
  | Error errs ->
      Format.printf "Errors :\n";
      List.iter print errs ;
      raise @@ Failure ("force_ok : " ^ msg)

let is_ok = function
  | Ok _ -> true
  | Error _ -> false

let force_ok_str ?(msg = "") = function
  | Ok x -> x
  | Error err ->
      Format.printf "Error : %s\n" err;
      raise @@ Failure ("force_ok : " ^ msg)
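A minimal usage sketch, not part of the vendored file: it assumes the library is linked and that this module is reachable as Tezos_utils.Error_monad through the re-export in tezos_utils.ml above.

(* Illustrative only: is_ok is a cheap predicate on a result; force_ok unwraps
   Ok and prints then raises on Error. *)
let () =
  let result = Ok 42 in
  if Tezos_utils.Error_monad.is_ok result then
    Format.printf "got %d@."
      (Tezos_utils.Error_monad.force_ok ~msg:"demo" result)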
94
vendors/ligo-utils/tezos-utils/x_michelson.ml
vendored
Normal file
@ -0,0 +1,94 @@
open Tezos_micheline
open Micheline

include Michelson_primitives

type michelson = (int, prim) node
type t = michelson

let prim ?(annot=[]) ?(children=[]) p : michelson =
  Prim (0, p, children, annot)

let annotate annot = function
  | Prim (l, p, c, []) -> Prim (l, p, c, [annot])
  | _ -> raise (Failure "annotate")

let seq s : michelson = Seq (0, s)

let i_comment s : michelson = prim ~annot:["\"" ^ s ^ "\""] I_NOP

let contract parameter storage code =
  seq [
    prim ~children:[parameter] K_parameter ;
    prim ~children:[storage] K_storage ;
    prim ~children:[code] K_code ;
  ]

let int n : michelson = Int (0, n)
let string s : michelson = String (0, s)
let bytes s : michelson = Bytes (0, s)

let t_unit = prim T_unit
let t_string = prim T_string
let t_pair a b = prim ~children:[a;b] T_pair
let t_lambda a b = prim ~children:[a;b] T_lambda

let d_unit = prim D_Unit
let d_pair a b = prim ~children:[a;b] D_Pair

let i_dup = prim I_DUP
let i_car = prim I_CAR
let i_cdr = prim I_CDR
let i_pair = prim I_PAIR
let i_swap = prim I_SWAP
let i_piar = seq [ i_swap ; i_pair ]
let i_push ty code = prim ~children:[ty;code] I_PUSH
let i_push_unit = i_push t_unit d_unit
let i_push_string str = i_push t_string (string str)
let i_none ty = prim ~children:[ty] I_NONE
let i_nil ty = prim ~children:[ty] I_NIL
let i_some = prim I_SOME
let i_lambda arg ret body = prim ~children:[arg;ret;body] I_LAMBDA
let i_empty_map src dst = prim ~children:[src;dst] I_EMPTY_MAP
let i_drop = prim I_DROP
let i_exec = prim I_EXEC

let i_if a b = prim ~children:[seq [a] ; seq [b]] I_IF
let i_if_none a b = prim ~children:[seq [a] ; seq [b]] I_IF_NONE
let i_if_left a b = prim ~children:[seq [a] ; seq [b]] I_IF_LEFT
let i_failwith = prim I_FAILWITH
let i_assert_some = i_if_none (seq [i_push_string "ASSERT_SOME" ; i_failwith]) (seq [])
let i_assert_some_msg msg = i_if_none (seq [msg ; i_failwith]) (seq [])

let dip code : michelson = prim ~children:[seq [code]] I_DIP
let i_unpair = seq [i_dup ; i_car ; dip i_cdr]
let i_unpiar = seq [i_dup ; i_cdr ; dip i_car]

let rec strip_annots : michelson -> michelson = function
  | Seq (l, s) -> Seq (l, List.map strip_annots s)
  | Prim (l, p, lst, _) -> Prim (l, p, List.map strip_annots lst, [])
  | x -> x

let rec strip_nops : michelson -> michelson = function
  | Seq (l, s) -> Seq (l, List.map strip_nops s)
  | Prim (l, I_NOP, _, _) -> Seq (l, [])
  | Prim (l, p, lst, a) -> Prim (l, p, List.map strip_nops lst, a)
  | x -> x

let pp ppf (michelson:michelson) =
  let open Micheline_printer in
  let canonical = strip_locations michelson in
  let node = printable string_of_prim canonical in
  print_expr ppf node

let pp_stripped ppf (michelson:michelson) =
  let open Micheline_printer in
  let michelson' = strip_nops @@ strip_annots michelson in
  let canonical = strip_locations michelson' in
  let node = printable string_of_prim canonical in
  print_expr ppf node

let pp_naked ppf m =
  let naked = strip_annots m in
  pp ppf naked
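As a quick illustration (not part of the vendored file), the combinators above compose into a tiny contract and can be pretty-printed with pp. This assumes the module is reachable as Tezos_utils.Michelson through the re-export in tezos_utils.ml.

(* Illustrative only: build { parameter unit ; storage unit ;
   code { CDR ; NIL operation ; PAIR } } and print it. *)
let () =
  let open Tezos_utils.Michelson in
  let code = seq [ i_cdr ; i_nil (prim T_operation) ; i_pair ] in
  let c = contract t_unit t_unit code in
  Format.printf "%a@." pp c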