Shell: refactor the distributed DB.
This refactors `src/node/shell/state.ml` to trace the source of blocks and operations, preparing the node for the three-pass validator. In the process, it adds an in-memory overlay for blocks and operations.
parent 26ce119072
commit b674c538b2

src/Makefile | 27
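For readers skimming the diff: the "in-memory overlay" can be pictured as a read-through cache sitting in front of the on-disk store, flushed on commit. A minimal, self-contained sketch of the idea (not code from this commit; all names are illustrative, and in the node the overlay is tied to distributed-DB request scheduling rather than a plain hash table):

(* Hypothetical read-through overlay: recently seen values are served from an
   in-memory table and only written to the disk store when [commit] is called. *)
module Overlay (K : Hashtbl.HashedType) = struct
  module H = Hashtbl.Make (K)

  type 'v t = {
    cache : 'v H.t ;                   (* in-memory overlay *)
    read_disk : K.t -> 'v option ;     (* fallback to the on-disk store *)
    write_disk : K.t -> 'v -> unit ;   (* flush one binding to disk *)
  }

  let create ~read_disk ~write_disk =
    { cache = H.create 53 ; read_disk ; write_disk }

  (* Reads hit the overlay first, then fall back to the disk store. *)
  let read t k =
    match H.find_opt t.cache k with
    | Some _ as v -> v
    | None -> t.read_disk k

  (* Writes stay in memory until [commit] flushes them. *)
  let store t k v = H.replace t.cache k v

  let commit t =
    H.iter t.write_disk t.cache ;
    H.reset t.cache
end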
@@ -8,7 +8,7 @@ TZCLIENT=../tezos-client
 TZWEBCLIENT=../tezos-webclient
 TZATTACKER=../tezos-attacker
 
-all: ${TZCOMPILER} ${TZNODE} ${TZCLIENT} ${TZWEBCLIENT} ${TZATTACKER}
+all: ${TZCOMPILER} ${TZNODE} ${TZCLIENT} ${TZWEBCLIENT} # ${TZATTACKER}
 
 
 ############################################################################
@@ -283,22 +283,26 @@ NODE_LIB_INTFS := \
   \
   node/updater/fitness.mli \
   \
+  node/db/raw_store.mli \
+  node/db/store_helpers.mli \
+  node/db/store.mli \
+  \
   node/db/ir_funview.mli \
   node/db/persist.mli \
   node/db/context.mli \
-  node/db/store.mli \
-  node/db/db_proxy.mli \
   \
   node/updater/updater.mli \
   node/updater/proto_environment.mli \
   node/updater/register.mli \
   \
-  node/shell/tezos_p2p.mli \
   node/shell/state.mli \
+  node/shell/distributed_db_functors.mli \
+  node/shell/distributed_db_message.mli \
+  node/shell/distributed_db_metadata.mli \
+  node/shell/distributed_db.mli \
   node/shell/prevalidator.mli \
   node/shell/validator.mli \
   \
-  node/shell/discoverer.mli \
   node/shell/node_rpc_services.mli \
   node/shell/node.mli \
   node/shell/node_rpc.mli \
@@ -321,11 +325,14 @@ NODE_LIB_IMPLS := \
   \
   node/updater/fitness.ml \
   \
+  node/db/store_sigs.ml \
+  node/db/raw_store.ml \
+  node/db/store_helpers.ml \
+  node/db/store.ml \
+  \
   node/db/ir_funview.ml \
   node/db/persist.ml \
-  node/db/store.ml \
   node/db/context.ml \
-  node/db/db_proxy.ml \
   \
   node/updater/protocol.ml \
   node/updater/updater.ml \
@@ -333,12 +340,14 @@ NODE_LIB_IMPLS := \
   node/updater/proto_environment.ml \
   node/updater/register.ml \
   \
-  node/shell/tezos_p2p.ml \
   node/shell/state.ml \
+  node/shell/distributed_db_functors.ml \
+  node/shell/distributed_db_message.ml \
+  node/shell/distributed_db_metadata.ml \
+  node/shell/distributed_db.ml \
   node/shell/prevalidator.ml \
   node/shell/validator.ml \
   \
-  node/shell/discoverer.ml \
   node/shell/node_rpc_services.ml \
   node/shell/node.ml \
   node/shell/node_rpc.ml \
@@ -17,6 +17,7 @@ module Ed25519 = Proto.Local_environment.Environment.Ed25519
 let genesis_block_hashed = Block_hash.of_b58check
     "BLockGenesisGenesisGenesisGenesisGenesisGeneskvg68z"
 let network = Store.Net genesis_block_hashed
+let network = Store.Net_id.Id genesis_block_hashed
 
 (* the bootstrap accounts and actions like signing to do with them *)
 let source_account = List.nth Proto.Bootstrap_storage.accounts 4
@@ -32,7 +33,7 @@ let block_forged ?prev ops =
     [ MBytes.of_string Proto.Constants_repr.version_number ;
       Proto.Fitness_repr.int64_to_bytes x ] in
   let pred = match prev with None -> genesis_block_hashed | Some x -> x in
-  let block ops = Store.{ net_id = network ;
+  let block ops = Store.Block_header.{ net_id = network ;
                           predecessor = pred ;
                           timestamp = Time.now () ;
                           fitness = from_int64 1L;
@@ -117,8 +118,8 @@ let try_action addr port action =
     ~incoming:false
     conn
     (addr, port)
-    identity Tezos_p2p.Raw.supported_versions >>=? fun (_, auth_fd) ->
-  P2p_connection.accept auth_fd Tezos_p2p.Raw.encoding >>= function
+    identity Distributed_db.Raw.supported_versions >>=? fun (_, auth_fd) ->
+  P2p_connection.accept auth_fd Distributed_db.Raw.encoding >>= function
   | Error _ -> failwith "Connection rejected by peer."
   | Ok conn ->
       action conn >>=? fun () ->
@@ -130,8 +131,8 @@ let replicate n x =
     if n <= 0 then acc else replicate_acc (x :: acc) (n-1) x in
   replicate_acc [] n x
 
-let send conn (msg : Tezos_p2p.msg) =
-  P2p_connection.write conn (Tezos_p2p.Raw.Message msg)
+let send conn (msg : Distributed_db.Message.t) =
+  P2p_connection.write conn (P2p.Raw.Message msg)
 
 let request_block_times block_hash n conn =
   let open Block_hash in
@@ -139,7 +140,7 @@ let request_block_times block_hash n conn =
     "requesting %a block %d times"
     pp_short block_hash n >>= fun () ->
   let block_hashes = replicate n block_hash in
-  send conn (Get_blocks block_hashes)
+  send conn (Get_block_headers (network, block_hashes))
 
 let request_op_times op_signed n conn =
   let open Operation_hash in
@@ -46,20 +46,20 @@ let ignore_context =
 
 exception Version_not_found
 
-let versions = Protocol_hash_table.create 7
+let versions = Protocol_hash.Table.create 7
 
 let get_versions () =
-  Protocol_hash_table.fold
+  Protocol_hash.Table.fold
     (fun k c acc -> (k, c) :: acc)
     versions
     []
 
 let register name commands =
   let previous =
-    try Protocol_hash_table.find versions name
+    try Protocol_hash.Table.find versions name
     with Not_found -> [] in
-  Protocol_hash_table.add versions name (commands @ previous)
+  Protocol_hash.Table.add versions name (commands @ previous)
 
 let commands_for_version version =
-  try Protocol_hash_table.find versions version
+  try Protocol_hash.Table.find versions version
   with Not_found -> raise Version_not_found
@@ -91,18 +91,18 @@ let tls = in_both_groups @@
 
 (* Version specific options *)
 
-let contextual_options : (unit -> unit) ref Protocol_hash_table.t =
-  Protocol_hash_table.create 7
+let contextual_options : (unit -> unit) ref Protocol_hash.Table.t =
+  Protocol_hash.Table.create 7
 
 let register_config_option version option =
   let callback () =
     file_group # add option ;
    cli_group # add option in
  try
-    let cont = Protocol_hash_table.find contextual_options version in
+    let cont = Protocol_hash.Table.find contextual_options version in
    cont := fun () -> callback () ; !cont ()
  with Not_found ->
-    Protocol_hash_table.add contextual_options version (ref callback)
+    Protocol_hash.Table.add contextual_options version (ref callback)
 
 (* Entry point *)
 
@@ -115,7 +115,7 @@ let parse_args ?version usage dispatcher argv cctxt =
   | None -> ()
   | Some version ->
       try
-        !(Protocol_hash_table.find contextual_options version) ()
+        !(Protocol_hash.Table.find contextual_options version) ()
      with Not_found -> () end ;
   let anon dispatch n = match dispatch (`Arg n) with
    | `Nop -> ()
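The renaming above (Protocol_hash_table becoming Protocol_hash.Table, and likewise below for the other hash kinds) groups each hash-indexed container as a submodule of its hash module. Such a Table can be derived from the standard Hashtbl functor; a hedged sketch, assuming the hash module exposes equal and hash (the string stand-in is not the real hash type):

(* Hypothetical: derive a Table submodule from the hash type itself. *)
module Protocol_hash = struct
  type t = string                       (* stand-in for the real hash type *)
  let equal = String.equal
  let hash = Hashtbl.hash
  module Table = Hashtbl.Make (struct
      type nonrec t = t
      let equal = equal
      let hash = hash
    end)
end

(* Same call shapes as in the hunk above. *)
let versions : string list Protocol_hash.Table.t = Protocol_hash.Table.create 7
let () = Protocol_hash.Table.add versions "ProtoALpha" ["command-a"]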
@@ -152,8 +152,6 @@ let describe cctxt ?recurse path =
   get_json cctxt (prefix @ path) arg >>=
   parse_answer cctxt Services.describe prefix
 
-type net = Services.Blocks.net = Net of Block_hash.t
-
 module Blocks = struct
   type block = Services.Blocks.block
 
@@ -164,9 +162,9 @@ module Blocks = struct
     timestamp: Time.t ;
     protocol: Protocol_hash.t option ;
     operations: Operation_hash.t list option ;
-    net: net ;
+    net: Updater.Net_id.t ;
     test_protocol: Protocol_hash.t option ;
-    test_network: (net * Time.t) option ;
+    test_network: (Updater.Net_id.t * Time.t) option ;
   }
   type preapply_param = Services.Blocks.preapply_param = {
     operations: Operation_hash.t list ;
@@ -7,15 +7,13 @@
 (*                                                                        *)
 (**************************************************************************)
 
-type net = State.net_id = Net of Block_hash.t
-
 val errors:
   Client_commands.context ->
   Json_schema.schema Lwt.t
 
 val forge_block:
   Client_commands.context ->
-  ?net:Updater.net_id ->
+  ?net:Updater.Net_id.t ->
   ?predecessor:Block_hash.t ->
   ?timestamp:Time.t ->
   Fitness.fitness ->
@@ -25,7 +23,7 @@ val forge_block:
 
 val validate_block:
   Client_commands.context ->
-  net -> Block_hash.t ->
+  Updater.Net_id.t -> Block_hash.t ->
   unit tzresult Lwt.t
 
 val inject_block:
@@ -57,7 +55,7 @@ module Blocks : sig
 
   val net:
     Client_commands.context ->
-    block -> net Lwt.t
+    block -> Updater.Net_id.t Lwt.t
   val predecessor:
    Client_commands.context ->
    block -> Block_hash.t Lwt.t
@@ -81,11 +79,11 @@ module Blocks : sig
    block -> Protocol_hash.t option Lwt.t
  val test_network:
    Client_commands.context ->
-    block -> (net * Time.t) option Lwt.t
+    block -> (Updater.Net_id.t * Time.t) option Lwt.t
 
  val pending_operations:
    Client_commands.context ->
-    block -> (error Updater.preapply_result * Operation_hash_set.t) Lwt.t
+    block -> (error Updater.preapply_result * Operation_hash.Set.t) Lwt.t
 
  type block_info = {
    hash: Block_hash.t ;
@@ -94,9 +92,9 @@ module Blocks : sig
    timestamp: Time.t ;
    protocol: Protocol_hash.t option ;
    operations: Operation_hash.t list option ;
-    net: net ;
+    net: Updater.Net_id.t ;
    test_protocol: Protocol_hash.t option ;
-    test_network: (net * Time.t) option ;
+    test_network: (Updater.Net_id.t * Time.t) option ;
  }
 
  val info:
@@ -134,18 +132,18 @@ module Operations : sig
  val monitor:
    Client_commands.context ->
    ?contents:bool -> unit ->
-    (Operation_hash.t * Store.operation option) list Lwt_stream.t Lwt.t
+    (Operation_hash.t * Store.Operation.t option) list Lwt_stream.t Lwt.t
 end
 
 module Protocols : sig
  val bytes:
    Client_commands.context ->
-    Protocol_hash.t -> Store.protocol tzresult Time.timed_data Lwt.t
+    Protocol_hash.t -> Store.Protocol.t Lwt.t
 
  val list:
    Client_commands.context ->
    ?contents:bool -> unit ->
-    (Protocol_hash.t * Store.protocol option) list Lwt.t
+    (Protocol_hash.t * Store.Protocol.t option) list Lwt.t
 end
 
 val complete:
@@ -50,11 +50,10 @@ let commands () =
      @@ param ~name:"protocol hash" ~desc:"" check_hash
      @@ stop)
     (fun ph cctxt ->
-       Client_node_rpcs.Protocols.bytes cctxt ph >>= fun { data } -> match data with
-       | Ok proto ->
-           Updater.extract "" ph proto >>= fun () ->
-           cctxt.message "Extracted protocol %a" Protocol_hash.pp_short ph
-       | Error err ->
-           cctxt.error "Error while dumping protocol %a: %a"
-             Protocol_hash.pp_short ph Error_monad.pp_print_error err);
+       Client_node_rpcs.Protocols.bytes cctxt ph >>= fun proto ->
+       Updater.extract "" ph proto >>= fun () ->
+       cctxt.message "Extracted protocol %a" Protocol_hash.pp_short ph) ;
+       (* | Error err -> *)
+       (*     cctxt.error "Error while dumping protocol %a: %a" *)
+       (*       Protocol_hash.pp_short ph Error_monad.pp_print_error err); *)
   ]
@@ -44,7 +44,7 @@ let inject_block cctxt block
   let seed_nonce_hash = Nonce.hash seed_nonce in
   Client_proto_rpcs.Context.next_level cctxt block >>=? fun level ->
   let shell =
-    { Store.net_id = bi.net ; predecessor = bi.hash ;
+    { Store.Block_header.net_id = bi.net ; predecessor = bi.hash ;
      timestamp ; fitness ; operations } in
   let slot = level.level, Int32.of_int priority in
   compute_stamp cctxt block
@@ -82,8 +82,8 @@ let forge_block cctxt block
    match operations with
    | None ->
        Client_node_rpcs.Blocks.pending_operations cctxt block >|= fun (ops, pendings) ->
-        Operation_hash_set.elements @@
-        Operation_hash_set.union (Updater.operations ops) pendings
+        Operation_hash.Set.elements @@
+        Operation_hash.Set.union (Updater.operations ops) pendings
    | Some operations -> Lwt.return operations
  end >>= fun operations ->
  begin
@@ -129,9 +129,9 @@ let forge_block cctxt block
    Time.pp_hum timestamp >>= fun () ->
  lwt_log_info "Computed fitness %a" Fitness.pp fitness >>= fun () ->
  if best_effort
-  || ( Operation_hash_map.is_empty operations.refused
-       && Operation_hash_map.is_empty operations.branch_refused
-       && Operation_hash_map.is_empty operations.branch_delayed ) then
+  || ( Operation_hash.Map.is_empty operations.refused
+       && Operation_hash.Map.is_empty operations.branch_refused
+       && Operation_hash.Map.is_empty operations.branch_delayed ) then
    inject_block cctxt ?force ~src_sk
      ~priority ~timestamp ~fitness ~seed_nonce block operations.applied
  else
@@ -365,7 +365,7 @@ let mine cctxt state =
  Client_node_rpcs.Blocks.pending_operations cctxt
    block >>= fun (res, ops) ->
  let operations =
-    let open Operation_hash_set in
+    let open Operation_hash.Set in
    elements (union ops (Updater.operations res)) in
  let request = List.length operations in
  Client_node_rpcs.Blocks.preapply cctxt block
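The same convention applies to sets and maps: Operation_hash_set / Operation_hash_map become Operation_hash.Set / Operation_hash.Map. A self-contained illustration of the shape assumed by the calls above (union, elements, is_empty); the string stand-in is not the real hash type:

(* Hypothetical shape of the renamed containers: Set and Map as submodules of
   the hash module, built from the standard functors. *)
module Operation_hash = struct
  type t = string                        (* stand-in for the real hash type *)
  let compare = String.compare
  module Set = Set.Make (struct type nonrec t = t let compare = compare end)
  module Map = Map.Make (struct type nonrec t = t let compare = compare end)
end

(* Same calls as in the hunks above. *)
let pending = Operation_hash.Set.of_list ["op1"; "op2"]
let applied = Operation_hash.Set.singleton "op3"
let all = Operation_hash.Set.elements (Operation_hash.Set.union pending applied)
let refused : unit Operation_hash.Map.t = Operation_hash.Map.empty
let ready = Operation_hash.Map.is_empty refused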
@@ -25,7 +25,7 @@ let monitor cctxt ?contents ?check () =
     (fun (hash, bytes) ->
        match bytes with
        | None -> Lwt.return (Some { hash; content = None })
-       | Some ({ Store.shell ; proto } : Updater.raw_operation) ->
+       | Some ({ Store.Operation.shell ; proto } : Updater.raw_operation) ->
            Client_proto_rpcs.Helpers.Parse.operations cctxt
              `Prevalidation ?check shell proto >>= function
            | Ok proto -> Lwt.return (Some { hash ; content = Some (shell, proto) })
@@ -16,7 +16,6 @@ let handle_error cctxt = function
    pp_print_error Format.err_formatter exns ;
    cctxt.Client_commands.error "%s" "cannot continue"
 
-type net = State.net_id = Net of Block_hash.t
 type block = [
  | `Genesis
  | `Head of int | `Prevalidation
@@ -10,8 +10,6 @@
 val string_of_errors: error list -> string
 val handle_error: Client_commands.context -> 'a tzresult -> 'a Lwt.t
 
-type net = State.net_id = Net of Block_hash.t
-
 type block = [
  | `Genesis
  | `Head of int | `Prevalidation
@@ -186,7 +184,7 @@ module Helpers : sig
    val operations:
      Client_commands.context ->
      block ->
-      net:net ->
+      net:Updater.Net_id.t ->
      source:Contract.t ->
      ?sourcePubKey:public_key ->
      counter:int32 ->
@@ -196,7 +194,7 @@ module Helpers : sig
    val transaction:
      Client_commands.context ->
      block ->
-      net:net ->
+      net:Updater.Net_id.t ->
      source:Contract.t ->
      ?sourcePubKey:public_key ->
      counter:int32 ->
@@ -208,7 +206,7 @@ module Helpers : sig
    val origination:
      Client_commands.context ->
      block ->
-      net:net ->
+      net:Updater.Net_id.t ->
      source:Contract.t ->
      ?sourcePubKey:public_key ->
      counter:int32 ->
@@ -224,7 +222,7 @@ module Helpers : sig
    val issuance:
      Client_commands.context ->
      block ->
-      net:net ->
+      net:Updater.Net_id.t ->
      source:Contract.t ->
      ?sourcePubKey:public_key ->
      counter:int32 ->
@@ -235,7 +233,7 @@ module Helpers : sig
    val delegation:
      Client_commands.context ->
      block ->
-      net:net ->
+      net:Updater.Net_id.t ->
      source:Contract.t ->
      ?sourcePubKey:public_key ->
      counter:int32 ->
@@ -247,14 +245,14 @@ module Helpers : sig
    val operations:
      Client_commands.context ->
      block ->
-      net:net ->
+      net:Updater.Net_id.t ->
      source:public_key ->
      delegate_operation list ->
      MBytes.t tzresult Lwt.t
    val endorsement:
      Client_commands.context ->
      block ->
-      net:net ->
+      net:Updater.Net_id.t ->
      source:public_key ->
      block:Block_hash.t ->
      slot:int ->
@@ -264,13 +262,13 @@ module Helpers : sig
    val operations:
      Client_commands.context ->
      block ->
-      net:net ->
+      net:Updater.Net_id.t ->
      anonymous_operation list ->
      MBytes.t tzresult Lwt.t
    val seed_nonce_revelation:
      Client_commands.context ->
      block ->
-      net:net ->
+      net:Updater.Net_id.t ->
      level:Raw_level.t ->
      nonce:Nonce.t ->
      unit -> MBytes.t tzresult Lwt.t
@@ -278,7 +276,7 @@ module Helpers : sig
    val block:
      Client_commands.context ->
      block ->
-      net:net ->
+      net:Updater.Net_id.t ->
      predecessor:Block_hash.t ->
      timestamp:Time.t ->
      fitness:Fitness.t ->
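The repeated `net:net ->` to `net:Updater.Net_id.t ->` change replaces the per-client `type net = ... = Net of Block_hash.t` aliases (removed above) with one shared identifier module. Judging from the uses elsewhere in this diff (`Store.Net_id.Id`, `Net_id.of_bytes_exn`, `Net_id.to_bytes`), its core is roughly the following; this is a guess at the shape, not the actual module, and Block_hash is stubbed with a string:

module Block_hash = struct
  type t = string
  let to_bytes h = h
  let of_bytes_exn b = b
end

module Net_id = struct
  type t = Id of Block_hash.t
  let to_bytes (Id h) = Block_hash.to_bytes h
  let of_bytes_exn b = Id (Block_hash.of_bytes_exn b)
  let equal (Id a) (Id b) = a = b
end

let genesis : Block_hash.t = "BLockGenesis..."   (* placeholder hash *)
let network = Net_id.Id genesis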
@@ -9,20 +9,20 @@
 
 (* Tezos Web Interface - version dependent services *)
 
-let contextual_static_files : string OCamlRes.Res.root Protocol_hash_table.t =
-  Protocol_hash_table.create 7
+let contextual_static_files : string OCamlRes.Res.root Protocol_hash.Table.t =
+  Protocol_hash.Table.create 7
 
 let register_static_files version root =
-  Protocol_hash_table.add contextual_static_files version root
+  Protocol_hash.Table.add contextual_static_files version root
 
 let find_contextual_static_files version =
-  Protocol_hash_table.find contextual_static_files version
+  Protocol_hash.Table.find contextual_static_files version
 
-let contextual_services : Node_rpc_services.Blocks.block RPC.directory Protocol_hash_table.t =
-  Protocol_hash_table.create 7
+let contextual_services : Node_rpc_services.Blocks.block RPC.directory Protocol_hash.Table.t =
+  Protocol_hash.Table.create 7
 
 let register_services version root =
-  Protocol_hash_table.add contextual_services version root
+  Protocol_hash.Table.add contextual_services version root
 
 let find_contextual_services version =
-  Protocol_hash_table.find contextual_services version
+  Protocol_hash.Table.find contextual_services version
@@ -126,6 +126,7 @@ module Meta = struct
 end
+
 module Protocol = struct
 
   type component = {
    name: string;
    interface: string option;
@@ -143,8 +144,12 @@ module Protocol = struct
         (req "implementation" string))
 
  type t = component list
+  type protocol = t
  let encoding = Data_encoding.list component_encoding
 
+  let compare = Pervasives.compare
+  let equal = (=)
+
  let to_bytes v = Data_encoding.Binary.to_bytes encoding v
  let of_bytes b = Data_encoding.Binary.of_bytes encoding b
  let hash proto = Protocol_hash.hash_bytes [to_bytes proto]
@@ -7,6 +7,8 @@
 (*                                                                        *)
 (**************************************************************************)
 
+open Hash
+
 (** Low-level part of the [Updater]. *)
 
 module Meta : sig
@@ -15,21 +17,25 @@ module Meta : sig
 end
 
 module Protocol : sig
-  type component = {
-    name : string;
-    interface : string option;
-    implementation : string;
-  }
-  val find_component : Lwt_io.file_name -> string -> component
-  val component_encoding : component Data_encoding.encoding
 
  type t = component list
-  val encoding : t Data_encoding.encoding
-  val to_bytes : t -> MBytes.t
-  val of_bytes : MBytes.t -> t option
-  val hash : t -> Hash.Protocol_hash.t
 
-  val of_dir : Lwt_io.file_name -> t
+  and component = {
+    name: string ;
+    interface: string option ;
+    implementation: string ;
+  }
+
+  type protocol = t
+
+  val compare: protocol -> protocol -> int
+  val equal: protocol -> protocol -> bool
+
+  val hash: protocol -> Protocol_hash.t
+  val encoding: protocol Data_encoding.encoding
+
+  val of_dir: Lwt_io.file_name -> protocol
+
 end
 
 val main: unit -> unit
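The new `compare`/`equal` on protocols are plain structural comparison; their main point is that a protocol value can now be handled like any other comparable datum, for instance as a map key. A stand-alone illustration (the record mirrors the component type above; names below are illustrative):

type component = {
  name : string ;
  interface : string option ;
  implementation : string ;
}
type protocol = component list

let compare : protocol -> protocol -> int = compare
let equal : protocol -> protocol -> bool = (=)

(* Structural comparison makes protocols usable as ordered-map keys. *)
module Protocol_map = Map.Make (struct
    type t = protocol
    let compare = compare
  end)

let registry : string Protocol_map.t =
  Protocol_map.singleton
    [ { name = "main" ; interface = None ; implementation = "" } ]
    "demo entry"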
@@ -161,7 +161,8 @@ val assoc : 'a encoding -> (string * 'a) list encoding
 
 type 't case
 val case :
-  ?tag:int -> 'a encoding -> ('t -> 'a option) -> ('a -> 't) -> 't case
+  ?tag:int ->
+  'a encoding -> ('t -> 'a option) -> ('a -> 't) -> 't case
 val union :
   ?tag_size:[ `Uint8 | `Uint16 ] -> 't case list -> 't encoding
 
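For reference, the way these two combinators fit together per the signature above: each case pairs a projection with an injection, and union ties the cases into one encoding. A hedged example (the variant and its constructors are made up, and the use of int31 for the second case is an assumption about the available primitive encodings):

type msg = Text of string | Code of int

let msg_encoding =
  let open Data_encoding in
  union ~tag_size:`Uint8 [
    case ~tag:0 string
      (function Text s -> Some s | _ -> None)
      (fun s -> Text s) ;
    case ~tag:1 int31
      (function Code i -> Some i | _ -> None)
      (fun i -> Code i) ;
  ]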
@@ -108,6 +108,13 @@ let rec remove_elem_from_list nb = function
   | l when nb <= 0 -> l
   | _ :: tl -> remove_elem_from_list (nb - 1) tl
 
+let rec split_list_at n l =
+  let rec split n acc = function
+    | [] -> List.rev acc, []
+    | l when n <= 0 -> List.rev acc, l
+    | hd :: tl -> split (n - 1) (hd :: acc) tl in
+  split n [] l
+
 let has_prefix ~prefix s =
   let x = String.length prefix in
   let n = String.length s in
@@ -30,6 +30,7 @@ val display_paragraph: Format.formatter -> string -> unit
 
 (** [remove nb list] remove the first [nb] elements from the list [list]. *)
 val remove_elem_from_list: int -> 'a list -> 'a list
+val split_list_at: int -> 'a list -> 'a list * 'a list
 
 val has_prefix: prefix:string -> string -> bool
 val remove_prefix: prefix:string -> string -> string option
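For reference, the behaviour of the new helper: split_list_at n l returns the first n elements of l and the remainder. A stand-alone copy with the expected results:

let split_list_at n l =
  let rec split n acc = function
    | [] -> List.rev acc, []
    | l when n <= 0 -> List.rev acc, l
    | hd :: tl -> split (n - 1) (hd :: acc) tl in
  split n [] l

let () =
  assert (split_list_at 2 [1; 2; 3; 4] = ([1; 2], [3; 4])) ;
  assert (split_list_at 0 [1; 2] = ([], [1; 2])) ;
  assert (split_list_at 5 [1; 2] = ([1; 2], []))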
@@ -13,60 +13,48 @@ open Logging.Db
 
 module IrminPath = Irmin.Path.String_list
 
-module rec S : sig
-
-  module type STORE = sig
-
-    include Irmin.S with type commit_id = Irmin.Hash.SHA1.t
-                     and type key = IrminPath.t
-                     and type value = MBytes.t
-                     and type branch_id = string
-
-    module FunView : sig
-
-      type v
-
-      val of_path: t -> IrminPath.t -> v Lwt.t
-      val update_path: t -> IrminPath.t -> v -> unit Lwt.t
-
-      val mem: v -> IrminPath.t -> bool Lwt.t
-      val dir_mem: v -> IrminPath.t -> bool Lwt.t
-      val get: v -> IrminPath.t -> MBytes.t option Lwt.t
-      val set: v -> IrminPath.t -> MBytes.t-> v Lwt.t
-      val del: v -> IrminPath.t -> v Lwt.t
-      val list: v -> IrminPath.t list -> IrminPath.t list Lwt.t
-      val remove_rec: v -> IrminPath.t -> v Lwt.t
-
-    end
-
-    val path : string
-    val local_repo : Repo.t
-    val patch_context : (module S.VIEW) -> (module S.VIEW) Lwt.t
-
-  end
-
-  module type VIEW = sig
-    module Store : STORE
-    val s : Store.t
-    val v : Store.FunView.v
-  end
-
-end = struct
-  module type STORE = S.STORE
-  module type VIEW = S.VIEW
+module MBytesContent = struct
+  module Tc_S0 =
+    (val Tc.biject Tc.cstruct Cstruct.to_bigarray Cstruct.of_bigarray)
+  include Tc_S0
+  module Path = Irmin.Path.String_list
+  let merge =
+    let fn = Irmin.Merge.(option (module Tc_S0) (default (module Tc_S0))) in
+    fun _path -> fn
 end
 
-include S
+module GitStore = struct
 
-let pack (type s) (type v)
-    (module S : STORE with type t = s and type FunView.v = v) (s : s) (v : v) =
-  (module struct
-    module Store = S
-    let s = s
-    let v = v
-  end : VIEW)
+  module Store =
+    Irmin_unix.Irmin_git.FS
+      (MBytesContent) (Irmin.Ref.String) (Irmin.Hash.SHA1)
 
-type index = (module STORE)
-
-type store = (module VIEW)
+  include Store
+
+  module View = Irmin.View (Store)
+
+  module FunView = struct
+    include Ir_funview.Make (Store)
+    type v = t
+    let get = read
+    let del = remove
+    let set = update
+    let list v k = Lwt_list.map_p (list v) k >|= List.flatten
+  end
+
+end
+
+type index = {
+  path: string ;
+  repo: GitStore.Repo.t ;
+  patch_context: context -> context Lwt.t ;
+}
+and context = {
+  index: index ;
+  store: GitStore.t ;
+  view: GitStore.FunView.t ;
+}
+type t = context
 
 (*-- Version Access and Update -----------------------------------------------*)
 
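The heart of the rewrite above is dropping the recursive-module / first-class-module encoding of the context in favour of plain records (index and context). In miniature, the pattern looks like this (illustrative only, not code from the tree):

(* Before: state threaded as a first-class module. *)
module type VIEW = sig
  val name : string
  val counter : int ref
end

let bump_fcm (module V : VIEW) = incr V.counter

(* After: the same state as an ordinary record, lighter to pass around,
   pattern-match and update functionally. *)
type view = { name : string ; counter : int ref }

let bump_record v = incr v.counter

let () =
  let v = { name = "demo" ; counter = ref 0 } in
  bump_record v ;
  bump_fcm (module struct let name = "demo" let counter = v.counter end : VIEW)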
@@ -78,23 +66,18 @@ let current_test_protocol_key = ["test_protocol"]
 let current_test_network_key = ["test_network"]
 let current_test_network_expiration_key = ["test_network_expiration"]
 let current_fork_test_network_key = ["fork_test_network"]
-let invalid_context_key = ["invalid_context"]
 
-let exists (module GitStore : STORE) key =
+let exists { repo } key =
   GitStore.of_branch_id
-    Irmin.Task.none (Block_hash.to_b58check key) GitStore.local_repo >>= fun t ->
+    Irmin.Task.none (Block_hash.to_b58check key) repo >>= fun t ->
   let store = t () in
   GitStore.read store genesis_block_key >>= function
   | Some _ ->
      Lwt.return true
   | None ->
-      GitStore.read store invalid_context_key >>= function
-      | Some _ ->
-          Lwt.return true
-      | None ->
-          Lwt.return false
+      Lwt.return false
 
-let checkout ((module GitStore : STORE) as index) key =
+let checkout index key =
   lwt_debug "-> Context.checkout %a"
     Block_hash.pp_short key >>= fun () ->
   exists index key >>= fun exists ->
@@ -102,31 +85,21 @@ let checkout ((module GitStore : STORE) as index) key =
    Lwt.return None
  else
    GitStore.of_branch_id
-      Irmin.Task.none (Block_hash.to_b58check key) GitStore.local_repo >>= fun t ->
+      Irmin.Task.none (Block_hash.to_b58check key) index.repo >>= fun t ->
    let store = t () in
-    GitStore.FunView.of_path store [] >>= fun v ->
+    GitStore.FunView.of_path store [] >>= fun view ->
+    let ctxt = { index ; store ; view } in
+    index.patch_context ctxt >>= fun ctxt ->
    lwt_debug "<- Context.checkout %a OK"
      Block_hash.pp_short key >>= fun () ->
-    GitStore.FunView.get v invalid_context_key >>= function
-    | None ->
-        GitStore.patch_context (pack (module GitStore) store v) >>= fun ctxt ->
-        Lwt.return (Some (Ok ctxt))
-    | Some bytes ->
-        match Data_encoding_ezjsonm.from_string (MBytes.to_string bytes) with
-        | Ok (`A errors) ->
-            Lwt.return (Some (Error (List.map error_of_json errors)))
-        | Error _ | Ok _->
-            Lwt.return (Some (generic_error (MBytes.to_string bytes)))
-
-exception Invalid_context of error list
+    Lwt.return (Some ctxt)
 
 let checkout_exn index key =
  checkout index key >>= function
  | None -> Lwt.fail Not_found
-  | Some (Error error) -> Lwt.fail (Invalid_context error)
-  | Some (Ok p) -> Lwt.return p
+  | Some p -> Lwt.return p
 
-let exists ((module GitStore : STORE) as index) key =
+let exists index key =
  lwt_debug "-> Context.exists %a"
    Block_hash.pp_short key >>= fun () ->
  exists index key >>= fun exists ->
@@ -134,48 +107,27 @@ let exists ((module GitStore : STORE) as index) key =
    Block_hash.pp_short key exists >>= fun () ->
  Lwt.return exists
 
-exception Preexistent_context of string * Block_hash.t
-exception Empty_head of string * Block_hash.t
+exception Preexistent_context of Block_hash.t
+exception Empty_head of Block_hash.t
 
-let commit (module GitStore : STORE) block key (module View : VIEW) =
-  let module GitStore = View.Store in
+let commit block key context =
  let task =
    Irmin.Task.create
-      ~date:(Time.to_seconds block.Store.shell.timestamp) ~owner:"tezos" in
-  GitStore.clone task View.s (Block_hash.to_b58check key) >>= function
-  | `Empty_head -> Lwt.fail (Empty_head (GitStore.path, key))
-  | `Duplicated_branch -> Lwt.fail (Preexistent_context (GitStore.path, key))
+      ~date:(Time.to_seconds block.Store.Block_header.shell.timestamp)
+      ~owner:"tezos" in
+  GitStore.clone task context.store (Block_hash.to_b58check key) >>= function
+  | `Empty_head -> Lwt.fail (Empty_head key)
+  | `Duplicated_branch -> Lwt.fail (Preexistent_context key)
  | `Ok store ->
      let msg =
        Format.asprintf "%a %a"
          Fitness.pp block.shell.fitness
          Block_hash.pp_short key in
-      GitStore.FunView.update_path (store msg) [] View.v
+      GitStore.FunView.update_path (store msg) [] context.view
 
-let commit_invalid (module GitStore : STORE) block key exns =
-  let task =
-    Irmin.Task.create
-      ~date:(Time.to_seconds block.Store.shell.timestamp) ~owner:"tezos" in
-  GitStore.of_branch_id
-    task (Block_hash.to_b58check key) GitStore.local_repo >>= fun t ->
-  let msg =
-    Format.asprintf "%a %a"
-      Fitness.pp block.shell.fitness
-      Block_hash.pp_short key in
-  let store = t msg in
-  GitStore.clone Irmin.Task.none store (Block_hash.to_b58check key) >>= function
-  | `Empty_head ->
-      GitStore.update store invalid_context_key
-        (MBytes.of_string @@ Data_encoding_ezjsonm.to_string @@
-         `A (List.map json_of_error exns))
-  | `Duplicated_branch | `Ok _ ->
-      Lwt.fail (Preexistent_context (GitStore.path, key))
-
 (*-- Generic Store Primitives ------------------------------------------------*)
 
-type t = store
-
 type key = string list
 
 let data_key key = "data" :: key
@@ -183,98 +135,71 @@ let undata_key = function
  | "data" :: key -> key
  | _ -> assert false
 
-let mem (module View : VIEW) key =
-  let module GitStore = View.Store in
-  GitStore.FunView.mem View.v (data_key key) >>= fun v ->
+let mem ctxt key =
+  GitStore.FunView.mem ctxt.view (data_key key) >>= fun v ->
  Lwt.return v
 
-let dir_mem (module View : VIEW) key =
-  let module GitStore = View.Store in
-  GitStore.FunView.dir_mem View.v (data_key key) >>= fun v ->
+let dir_mem ctxt key =
+  GitStore.FunView.dir_mem ctxt.view (data_key key) >>= fun v ->
  Lwt.return v
 
-let raw_get (module View : VIEW) key =
-  let module GitStore = View.Store in
-  GitStore.FunView.get View.v key >>= function
+let raw_get ctxt key =
+  GitStore.FunView.get ctxt.view key >>= function
  | None -> Lwt.return_none
  | Some bytes -> Lwt.return (Some bytes)
 let get t key = raw_get t (data_key key)
 
-let raw_set (module View : VIEW) key data =
-  let module GitStore = View.Store in
-  GitStore.FunView.set View.v key data >>= fun v ->
-  Lwt.return (pack (module GitStore) View.s v)
+let raw_set ctxt key data =
+  GitStore.FunView.set ctxt.view key data >>= fun view ->
+  Lwt.return { ctxt with view }
 let set t key data = raw_set t (data_key key) data
 
-let raw_del (module View : VIEW) key =
-  let module GitStore = View.Store in
-  GitStore.FunView.del View.v key >>= fun v ->
-  Lwt.return (pack (module GitStore) View.s v)
+let raw_del ctxt key =
+  GitStore.FunView.del ctxt.view key >>= fun view ->
+  Lwt.return { ctxt with view }
 let del t key = raw_del t (data_key key)
 
-let list (module View : VIEW) keys =
-  let module GitStore = View.Store in
-  GitStore.FunView.list View.v (List.map data_key keys) >>= fun v ->
-  Lwt.return (List.map undata_key v)
+let list ctxt keys =
+  GitStore.FunView.list ctxt.view (List.map data_key keys) >>= fun keys ->
+  Lwt.return (List.map undata_key keys)
 
-let remove_rec (module View : VIEW) key =
-  let module GitStore = View.Store in
-  GitStore.FunView.remove_rec View.v (data_key key) >>= fun v ->
-  Lwt.return (pack (module GitStore) View.s v)
+let remove_rec ctxt key =
+  GitStore.FunView.remove_rec ctxt.view (data_key key) >>= fun view ->
+  Lwt.return { ctxt with view }
 
-let keys (module View : VIEW) = Store.undefined_key_fn
-
 (*-- Initialisation ----------------------------------------------------------*)
 
 let init ?patch_context ~root =
-  let module GitStore =
-    Irmin_unix.Irmin_git.FS
-      (Store.MBytesContent) (Irmin.Ref.String) (Irmin.Hash.SHA1) in
  GitStore.Repo.create
-    (Irmin_unix.Irmin_git.config ~root ~bare:true ()) >>= fun local_repo ->
-  let module GitStoreView = Irmin.View (GitStore) in
-  let module ViewStore = struct
-
-    let path = root
-    let local_repo = local_repo
-    let patch_context =
+    (Irmin_unix.Irmin_git.config ~root ~bare:true ()) >>= fun repo ->
+  Lwt.return {
+    path = root ;
+    repo ;
+    patch_context =
      match patch_context with
      | None -> (fun ctxt -> Lwt.return ctxt)
      | Some patch_context -> patch_context
+  }
 
-    include GitStore
-
-    module FunView = struct
-      include Ir_funview.Make (GitStore)
-      type v = t
-      let get = read
-      let del = remove
-      let set = update
-      let list v k = Lwt_list.map_p (list v) k >|= List.flatten
-    end
-  end in
-  Lwt.return (module ViewStore : STORE)
-
-let create_genesis_context (module GitStore : STORE) genesis test_protocol =
+let commit_genesis index ~id:block ~time ~protocol ~test_protocol =
  GitStore.of_branch_id
-    Irmin.Task.none (Block_hash.to_b58check genesis.Store.block)
-    GitStore.local_repo >>= fun t ->
+    Irmin.Task.none (Block_hash.to_b58check block)
+    index.repo >>= fun t ->
  let store = t () in
-  GitStore.FunView.of_path store [] >>= fun v ->
-  GitStore.FunView.set v genesis_block_key
-    (Block_hash.to_bytes genesis.block) >>= fun v ->
-  GitStore.FunView.set v genesis_protocol_key
-    (Protocol_hash.to_bytes genesis.protocol) >>= fun v ->
-  GitStore.FunView.set v genesis_time_key
-    (MBytes.of_string (Time.to_notation genesis.time)) >>= fun v ->
-  GitStore.FunView.set v current_protocol_key
-    (Protocol_hash.to_bytes genesis.protocol) >>= fun v ->
-  GitStore.FunView.set v current_test_protocol_key
-    (Protocol_hash.to_bytes test_protocol) >>= fun v ->
-  let ctxt = pack (module GitStore) store v in
-  GitStore.patch_context ctxt >>= fun ctxt ->
-  let (module View : VIEW) = ctxt in
-  View.Store.FunView.update_path View.s [] View.v >>= fun () ->
+  GitStore.FunView.of_path store [] >>= fun view ->
+  GitStore.FunView.set view genesis_block_key
+    (Block_hash.to_bytes block) >>= fun view ->
+  GitStore.FunView.set view genesis_protocol_key
+    (Protocol_hash.to_bytes protocol) >>= fun view ->
+  GitStore.FunView.set view genesis_time_key
+    (MBytes.of_string (Time.to_notation time)) >>= fun view ->
+  GitStore.FunView.set view current_protocol_key
+    (Protocol_hash.to_bytes protocol) >>= fun view ->
+  GitStore.FunView.set view current_test_protocol_key
+    (Protocol_hash.to_bytes test_protocol) >>= fun view ->
+  let ctxt = { index ; store ; view } in
+  index.patch_context ctxt >>= fun ctxt ->
+  GitStore.FunView.update_path ctxt.store [] ctxt.view >>= fun () ->
  Lwt.return ctxt
 
 (*-- Predefined Fields -------------------------------------------------------*)
@@ -282,23 +207,23 @@ let create_genesis_context (module GitStore : STORE) genesis test_protocol =
 let get_protocol v =
  raw_get v current_protocol_key >>= function
  | None -> assert false
-  | Some data -> Lwt.return (Protocol_hash.of_bytes data)
+  | Some data -> Lwt.return (Protocol_hash.of_bytes_exn data)
 let set_protocol v key =
  raw_set v current_protocol_key (Protocol_hash.to_bytes key)
 
 let get_test_protocol v =
  raw_get v current_test_protocol_key >>= function
  | None -> assert false
-  | Some data -> Lwt.return (Protocol_hash.of_bytes data)
+  | Some data -> Lwt.return (Protocol_hash.of_bytes_exn data)
 let set_test_protocol v data =
  raw_set v current_test_protocol_key (Protocol_hash.to_bytes data)
 
 let get_test_network v =
  raw_get v current_test_network_key >>= function
  | None -> Lwt.return_none
-  | Some data -> Lwt.return (Some (Store.Net (Block_hash.of_bytes data)))
-let set_test_network v (Store.Net data) =
-  raw_set v current_test_network_key (Block_hash.to_bytes data)
+  | Some data -> Lwt.return (Some (Store.Net_id.of_bytes_exn data))
+let set_test_network v id =
+  raw_set v current_test_network_key (Store.Net_id.to_bytes id)
 let del_test_network v = raw_del v current_test_network_key
 
 let get_test_network_expiration v =
@@ -324,10 +249,31 @@ let fork_test_network v =
 let get_genesis_block v =
  raw_get v genesis_block_key >>= function
  | None -> assert false
-  | Some block -> Lwt.return (Block_hash.of_bytes block)
+  | Some block -> Lwt.return (Block_hash.of_bytes_exn block)
 
 let get_genesis_time v =
  raw_get v genesis_time_key >>= function
  | None -> assert false
  | Some time -> Lwt.return (Time.of_notation_exn (MBytes.to_string time))
 
+let init_test_network v ~time ~genesis =
+  get_test_protocol v >>= fun test_protocol ->
+  del_test_network_expiration v >>= fun v ->
+  set_protocol v test_protocol >>= fun v ->
+  raw_set v genesis_time_key
+    (MBytes.of_string (Time.to_notation time)) >>= fun v ->
+  raw_set v genesis_block_key (Block_hash.to_bytes genesis) >>= fun v ->
+  let task =
+    Irmin.Task.create
+      ~date:(Time.to_seconds time)
+      ~owner:"tezos" in
+  GitStore.clone task v.store (Block_hash.to_b58check genesis) >>= function
+  | `Empty_head -> Lwt.return (Error [Exn (Empty_head genesis)])
+  | `Duplicated_branch -> Lwt.return (Error [Exn (Preexistent_context genesis)])
+  | `Ok store ->
+      let msg =
+        Format.asprintf "Fake block. Forking testnet: %a."
+          Block_hash.pp_short genesis in
+      GitStore.FunView.update_path (store msg) [] v.view >>= fun () ->
+      return v
@@ -13,50 +13,56 @@
 type index
 
 (** A (key x value) store for a given block. *)
-type store
+type t
+type context = t
 
 (** Open or initialize a versioned store at a given path. *)
 val init:
-  ?patch_context:(store -> store Lwt.t) ->
+  ?patch_context:(context -> context Lwt.t) ->
  root:string ->
  index Lwt.t
 
-val create_genesis_context:
-  index -> Store.genesis -> Protocol_hash.t -> store Lwt.t
+val commit_genesis:
+  index ->
+  id:Block_hash.t ->
+  time:Time.t ->
+  protocol:Protocol_hash.t ->
+  test_protocol:Protocol_hash.t ->
+  context Lwt.t
 
 (** {2 Generic interface} ****************************************************)
 
-include Persist.STORE with type t = store
+include Persist.STORE with type t := context
 
 (** {2 Accessing and Updating Versions} **************************************)
 
-exception Preexistent_context of string * Block_hash.t
+exception Preexistent_context of Block_hash.t
 val exists: index -> Block_hash.t -> bool Lwt.t
-val commit: index -> Store.block -> Block_hash.t -> store -> unit Lwt.t
-val commit_invalid:
-  index -> Store.block -> Block_hash.t -> error list -> unit Lwt.t
-val checkout: index -> Block_hash.t -> store tzresult option Lwt.t
-exception Invalid_context of error list
-val checkout_exn: index -> Block_hash.t -> store Lwt.t
+val checkout: index -> Block_hash.t -> context option Lwt.t
+val checkout_exn: index -> Block_hash.t -> context Lwt.t
+val commit: Store.Block_header.t -> Block_hash.t -> context -> unit Lwt.t
 
 (** {2 Predefined Fields} ****************************************************)
 
-val get_protocol: store -> Protocol_hash.t Lwt.t
-val set_protocol: store -> Protocol_hash.t -> store Lwt.t
+val get_protocol: context -> Protocol_hash.t Lwt.t
+val set_protocol: context -> Protocol_hash.t -> context Lwt.t
 
-val get_test_protocol: store -> Protocol_hash.t Lwt.t
-val set_test_protocol: store -> Protocol_hash.t -> store Lwt.t
+val get_test_protocol: context -> Protocol_hash.t Lwt.t
+val set_test_protocol: context -> Protocol_hash.t -> context Lwt.t
 
-val get_test_network: store -> Store.net_id option Lwt.t
-val set_test_network: store -> Store.net_id -> store Lwt.t
-val del_test_network: store -> store Lwt.t
+val get_test_network: context -> Store.Net_id.t option Lwt.t
+val set_test_network: context -> Store.Net_id.t -> context Lwt.t
+val del_test_network: context -> context Lwt.t
 
-val get_test_network_expiration: store -> Time.t option Lwt.t
-val set_test_network_expiration: store -> Time.t -> store Lwt.t
-val del_test_network_expiration: store -> store Lwt.t
+val get_test_network_expiration: context -> Time.t option Lwt.t
+val set_test_network_expiration: context -> Time.t -> context Lwt.t
+val del_test_network_expiration: context -> context Lwt.t
 
-val read_and_reset_fork_test_network: store -> (bool * store) Lwt.t
-val fork_test_network: store -> store Lwt.t
+val read_and_reset_fork_test_network: context -> (bool * context) Lwt.t
+val fork_test_network: context -> context Lwt.t
 
-val get_genesis_time: store -> Time.t Lwt.t
-val get_genesis_block: store -> Block_hash.t Lwt.t
+val get_genesis_time: context -> Time.t Lwt.t
+val get_genesis_block: context -> Block_hash.t Lwt.t
+
+val init_test_network:
+  context -> time:Time.t -> genesis:Block_hash.t -> context tzresult Lwt.t
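Putting the reshaped interface together, a typical caller now opens an index once and threads context values through checkout/commit. A hedged sketch against the signatures above (it only type-checks inside the node's own source tree; the hash arguments are placeholders supplied by the caller):

let ( >>= ) = Lwt.bind

(* [genesis_hash] : Block_hash.t and [proto] : Protocol_hash.t are assumed
   to be provided by the caller. *)
let open_and_bootstrap ~root ~genesis_hash ~proto =
  Context.init ~root >>= fun index ->
  Context.commit_genesis index
    ~id:genesis_hash ~time:(Time.now ())
    ~protocol:proto ~test_protocol:proto >>= fun _genesis_context ->
  Context.checkout index genesis_hash >>= function
  | None -> Lwt.fail Not_found
  | Some context -> Context.get_protocol context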
(deleted file)
@@ -1,149 +0,0 @@
-(**************************************************************************)
-(*                                                                        *)
-(*    Copyright (c) 2014 - 2016.                                          *)
-(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
-(*                                                                        *)
-(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
-(*                                                                        *)
-(**************************************************************************)
-
-module type DISTRIBUTED_DB = sig
-  type t
-  type state
-  type store
-  type key
-  type value
-  val create: state -> store Persist.shared_ref -> t
-  val known: t -> key -> bool Lwt.t
-  val read: t -> key -> value option Lwt.t
-  val prefetch: t -> Store.net_id -> key -> unit
-  val fetch: t -> Store.net_id -> key -> value Lwt.t
-  val pending: t -> key -> bool
-  val store: t -> key -> value -> bool Lwt.t
-  val update: t -> key -> value -> bool Lwt.t
-  val remove: t -> key -> bool Lwt.t
-  val shutdown: t -> unit Lwt.t
-  val keys: t -> key list Lwt.t
-end
-
-type operation_state = {
-  request_operations: Store.net_id -> Operation_hash.t list -> unit ;
-}
-
-module Operation_scheduler = struct
-  let name = "operation_scheduler"
-  type rdata = Store.net_id
-  type data = float ref
-  type state = operation_state
-  let init_request _ _ = Lwt.return (ref 0.0)
-  let request net ~get:_ ~set:_ pendings =
-    let current_time = Unix.gettimeofday () in
-    let time = current_time -. (3. +. Random.float 8.) in
-    let operations =
-      List.fold_left
-        (fun acc (hash, last_request, Store.Net net_id) ->
-           if !last_request < time then begin
-             last_request := current_time ;
-             let prev =
-               try Block_hash_map.find net_id acc
-               with Not_found -> [] in
-             Block_hash_map.add net_id (hash :: prev) acc
-           end else
-             acc)
-        Block_hash_map.empty
-        pendings in
-    if Block_hash_map.is_empty operations then
-      0.
-    else begin
-      Block_hash_map.iter
-        (fun net_id -> net.request_operations (Net net_id))
-        operations ;
-      1. +. Random.float 4.
-    end
-end
-
-module Operation =
-  Persist.MakeImperativeProxy
-    (Store.Faked_functional_operation)
-    (Operation_hash_table) (Operation_scheduler)
-
-type block_state = {
-  request_blocks: Store.net_id -> Block_hash.t list -> unit ;
-}
-
-module Block_scheduler = struct
-  let name = "block_scheduler"
-  type rdata = Store.net_id
-  type data = float ref
-  type state = block_state
-  let init_request _ _ = Lwt.return (ref 0.0)
-  let request net ~get:_ ~set:_ pendings =
-    let current_time = Unix.gettimeofday () in
-    let limit = current_time -. (3. +. Random.float 8.) in
-    let blocks =
-      List.fold_left
-        (fun acc (hash, last_request, Store.Net net_id) ->
-           if !last_request < limit then begin
-             last_request := current_time ;
-             let prev =
-               try Block_hash_map.find net_id acc
-               with Not_found -> [] in
-             Block_hash_map.add net_id (hash :: prev) acc
-           end else
-             acc)
-        Block_hash_map.empty
-        pendings in
-    if Block_hash_map.is_empty blocks then
-      0.
-    else begin
-      Block_hash_map.iter
-        (fun net_id -> net.request_blocks (Net net_id))
-        blocks ;
-      1. +. Random.float 4.
-    end
-end
-
-module Block =
-  Persist.MakeImperativeProxy
-    (Store.Faked_functional_block)
-    (Block_hash_table) (Block_scheduler)
-
-type protocol_state = {
-  request_protocols: Protocol_hash.t list -> unit ;
-}
-
-module Protocol_scheduler = struct
-  let name = "protocol_scheduler"
-  type rdata = Store.net_id
-  type data = float ref
-  type state = protocol_state
-  let init_request _ _ = Lwt.return (ref 0.0)
-  let request net ~get:_ ~set:_ pendings =
-    let current_time = Unix.gettimeofday () in
-    let time = current_time -. (3. +. Random.float 8.) in
-    let protocols =
-      List.fold_left
-        (fun acc (hash, last_request, Store.Net net_id) ->
-           if !last_request < time then begin
-             last_request := current_time ;
-             let prev =
-               try Block_hash_map.find net_id acc
-               with Not_found -> [] in
-             Block_hash_map.add net_id (hash :: prev) acc
-           end else
-             acc)
-        Block_hash_map.empty
-        pendings in
-    if Block_hash_map.is_empty protocols then
-      0.
-    else begin
-      Block_hash_map.iter (fun _net_id -> net.request_protocols) protocols ;
-      1. +. Random.float 4.
-    end
-end
-
-module Protocol =
-  Persist.MakeImperativeProxy
-    (Store.Faked_functional_protocol)
-    (Protocol_hash_table) (Protocol_scheduler)
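All three schedulers deleted above implement the same policy: a pending key is re-requested only when its last request is older than a randomized 3-11 second window, and the worker then backs off 1-5 seconds before the next round. A self-contained sketch of just that policy, with a hypothetical `pendings` list and `send` callback standing in for the per-kind plumbing:

(* Generic form of the re-request loop used by the removed schedulers.
   `pendings` and `send` are stand-ins, not names from this commit. *)
let schedule_round ~(pendings : ('k * float ref) list) ~(send : 'k list -> unit) =
  let now = Unix.gettimeofday () in
  let limit = now -. (3. +. Random.float 8.) in
  let stale =
    List.fold_left
      (fun acc (key, last_request) ->
         if !last_request < limit then begin
           last_request := now ;                 (* remember when we re-asked *)
           key :: acc
         end else acc)
      [] pendings in
  match stale with
  | [] -> 0.                                     (* nothing stale: poll again at once *)
  | keys -> send keys ; 1. +. Random.float 4.    (* back off before the next round *)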
(deleted file)
@@ -1,58 +0,0 @@
-(**************************************************************************)
-(*                                                                        *)
-(*    Copyright (c) 2014 - 2016.                                          *)
-(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
-(*                                                                        *)
-(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
-(*                                                                        *)
-(**************************************************************************)
-
-module type DISTRIBUTED_DB = sig
-  type t
-  type state
-  type store
-  type key
-  type value
-  val create: state -> store Persist.shared_ref -> t
-  val known: t -> key -> bool Lwt.t
-  val read: t -> key -> value option Lwt.t
-  val prefetch: t -> Store.net_id -> key -> unit
-  val fetch: t -> Store.net_id -> key -> value Lwt.t
-  val pending: t -> key -> bool
-  val store: t -> key -> value -> bool Lwt.t
-  val update: t -> key -> value -> bool Lwt.t
-  val remove: t -> key -> bool Lwt.t
-  val shutdown: t -> unit Lwt.t
-  val keys: t -> key list Lwt.t
-end
-
-type operation_state = {
-  request_operations: Store.net_id -> Operation_hash.t list -> unit ;
-}
-
-module Operation :
-  DISTRIBUTED_DB with type store := Store.Operation.t
-                  and type key := Store.Operation.key
-                  and type value := Store.Operation.value
-                  and type state := operation_state
-
-type block_state = {
-  request_blocks: Store.net_id -> Block_hash.t list -> unit ;
-}
-
-module Block :
-  DISTRIBUTED_DB with type store := Store.Block.t
-                  and type key := Store.Block.key
-                  and type value := Store.Block.value
-                  and type state := block_state
-
-type protocol_state = {
-  request_protocols: Protocol_hash.t list -> unit ;
-}
-
-module Protocol :
-  DISTRIBUTED_DB with type store := Store.Protocol.t
-                  and type key := Store.Protocol.key
-                  and type value := Store.Protocol.value
-                  and type state := protocol_state
@@ -25,7 +25,6 @@ module type STORE = sig
   val del: t -> key -> t Lwt.t
   val list: t -> key list -> key list Lwt.t
   val remove_rec: t -> key -> t Lwt.t
-  val keys: t -> key list Lwt.t
 end
@@ -37,7+36,6 @@ module type BYTES_STORE = sig
   val del: t -> key -> t Lwt.t
   val list: t -> key list -> key list Lwt.t
   val remove_rec: t -> key -> t Lwt.t
-  val keys: t -> key list Lwt.t
 end
@@ -48,7 +46,6 @@ module type TYPED_STORE = sig
   val get: t -> key -> value option Lwt.t
   val set: t -> key -> value -> t Lwt.t
   val del: t -> key -> t Lwt.t
-  val keys: t -> key list Lwt.t
 end
@@ -150,7 +147,6 @@ module MakeBytesStore
   let remove_rec s k =
     S.remove_rec s (to_path k)
 
-  let keys s = S.keys s >|= List.map of_path
 end
@@ -172,7 +168,6 @@ module MakeTypedStore
 
   let raw_get = S.get
 
-  let keys = S.keys
 end
@@ -375,8 +370,6 @@ module type IMPERATIVE_PROXY = sig
   val fetch: t -> rdata -> Store.key -> Store.value Lwt.t
   val pending: t -> Store.key -> bool
   val shutdown: t -> unit Lwt.t
-
-  val keys: t -> Store.key list Lwt.t
 end
@@ -465,8 +458,6 @@ module MakeImperativeProxy
   let known { store } hash =
     use store (fun store -> Store.mem store hash)
 
-  let keys { store } = use store Store.keys
-
   let read { store } hash =
     use store (fun store -> Store.get store hash)
@@ -538,8 +529,6 @@ module MakeImperativeProxy
   let shutdown { cancel ; worker } =
     cancel () >>= fun () -> worker
 
-  let keys { store } =
-    use store (fun store -> Store.keys store)
 end
@@ -592,14 +581,14 @@ module MakeHashResolver
     (H: HASH) = struct
   let plen = List.length Store.prefix
   let build path =
-    H.of_path @@
+    H.of_path_exn @@
     Utils.remove_elem_from_list plen path
   let resolve t p =
     let rec loop prefix = function
       | [] ->
          Lwt.return [build prefix]
       | "" :: ds ->
-         Store.list t [ prefix] >>= fun prefixes ->
+         Store.list t [prefix] >>= fun prefixes ->
          Lwt_list.map_p (fun prefix -> loop prefix ds) prefixes
          >|= List.flatten
      | [d] ->
@@ -28,7 +28,6 @@ module type STORE = sig
   val del: t -> key -> t Lwt.t
   val list: t -> key list -> key list Lwt.t
   val remove_rec: t -> key -> t Lwt.t
-  val keys: t -> key list Lwt.t
 end
 
 (** Projection of OCaml keys of some abstract type to concrete storage
@@ -57,8 +56,6 @@ module type BYTES_STORE = sig
   val del: t -> key -> t Lwt.t
   val list: t -> key list -> key list Lwt.t
   val remove_rec: t -> key -> t Lwt.t
-
-  val keys : t -> key list Lwt.t
 end
 
 module MakeBytesStore (S : STORE) (K : KEY) :
@@ -86,8 +83,6 @@ module type TYPED_STORE = sig
   val get: t -> key -> value option Lwt.t
   val set: t -> key -> value -> t Lwt.t
   val del: t -> key -> t Lwt.t
-
-  val keys: t -> key list Lwt.t (** Not always relevant, BEWARE! *)
 end
 
 (** Gives a typed view of a store (values of a given type stored under
@@ -176,57 +171,6 @@ module MakeBufferedPersistentMap
      and type value := C.t
      and module Map := Map
-
-(** {2 Imperative overlays} **************************************************)
-
-type 'a shared_ref
-val share : 'a -> 'a shared_ref
-val update : 'a shared_ref -> ('a -> 'a option Lwt.t) -> bool Lwt.t
-val update_with_res :
-  'a shared_ref -> ('a -> ('a option * 'b) Lwt.t) -> (bool * 'b) Lwt.t
-val use : 'a shared_ref -> ('a -> 'b Lwt.t) -> 'b Lwt.t
-
-module type IMPERATIVE_PROXY = sig
-  module Store : TYPED_STORE
-
-  type t
-  type rdata
-  type state
-  val create: state -> Store.t shared_ref -> t
-  val known: t -> Store.key -> bool Lwt.t
-  val read: t -> Store.key -> Store.value option Lwt.t
-  val store: t -> Store.key -> Store.value -> bool Lwt.t
-  val update: t -> Store.key -> Store.value -> bool Lwt.t
-  val remove: t -> Store.key -> bool Lwt.t
-  val prefetch: t -> rdata -> Store.key -> unit
-  val fetch: t -> rdata -> Store.key -> Store.value Lwt.t
-  val pending: t -> Store.key -> bool
-  val shutdown: t -> unit Lwt.t
-
-  val keys: t -> Store.key list Lwt.t
-end
-
-module type IMPERATIVE_PROXY_SCHEDULER = sig
-  module Store : TYPED_STORE
-  type state
-  type rdata
-  type data
-
-  val name : string
-  val init_request :
-    state -> Store.key -> data Lwt.t
-  val request :
-    state ->
-    get:(rdata -> Store.key -> Store.value Lwt.t) ->
-    set:(Store.key -> Store.value -> unit Lwt.t) ->
-    (Store.key * data * rdata) list -> float
-end
-
-module MakeImperativeProxy
-  (Store : TYPED_STORE)
-  (Table : Hashtbl.S with type key = Store.key)
-  (Scheduler : IMPERATIVE_PROXY_SCHEDULER with module Store := Store)
-  : IMPERATIVE_PROXY with module Store := Store and type state = Scheduler.state
-                      and type rdata = Scheduler.rdata
 
 (** {2 Predefined Instances} *************************************************)
98	src/node/db/raw_store.ml  (new file)
@@ -0,0 +1,98 @@
+(**************************************************************************)
+(*                                                                        *)
+(*    Copyright (c) 2014 - 2016.                                          *)
+(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
+(*                                                                        *)
+(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
+(*                                                                        *)
+(**************************************************************************)
+
+open Logging.Db
+
+type t = { root : string }
+
+let init root =
+  IO.check_dir root >>=? fun () ->
+  return { root }
+
+type key = string list
+type value = MBytes.t
+
+let file_of_key { root } key =
+  String.concat Filename.dir_sep (root :: key)
+
+let dir_of_key { root } key =
+  let dir = List.rev @@ List.tl (List.rev key) in
+  String.concat Filename.dir_sep (root :: dir)
+
+let known s k =
+  let file = file_of_key s k in
+  Lwt.return (Sys.file_exists file && not (Sys.is_directory file))
+
+let known_dir s k =
+  let file = file_of_key s k in
+  Lwt.return (Sys.file_exists file && Sys.is_directory file)
+
+let read_opt s k =
+  let file = file_of_key s k in
+  if Sys.file_exists file && not (Sys.is_directory file) then
+    Lwt.catch
+      (fun () ->
+         IO.with_file_in file
+           (fun ba -> Lwt.return (Some ba)))
+      (fun e ->
+         warn "warn: can't read %s: %s"
+           file (Printexc.to_string e);
+         Lwt.return_none)
+  else
+    Lwt.return_none
+
+type error += Unknown of string list
+
+let read t key =
+  read_opt t key >>= function
+  | None -> fail (Unknown key)
+  | Some v -> return v
+
+let read_exn t key =
+  read_opt t key >>= function
+  | None -> Lwt.fail Not_found
+  | Some v -> Lwt.return v
+
+let remove s k =
+  IO.remove_file ~cleanup:true (file_of_key s k)
+
+let store s k v =
+  let file = file_of_key s k in
+  IO.remove_file ~cleanup:false file >>= fun () ->
+  IO.with_file_out file v
+
+let fold s k ~init ~f =
+  let dir = file_of_key s k in
+  IO.fold dir
+    ~init
+    ~f:(fun file acc ->
+        if IO.is_directory (Filename.concat dir file) then
+          f (`Dir (k @ [file])) acc
+        else
+          f (`Key (k @ [file])) acc)
+
+let fold_keys s k ~init ~f =
+  let rec loop k acc =
+    fold s k ~init:acc
+      ~f:(fun file acc ->
+          match file with
+          | `Key k -> f k acc
+          | `Dir k -> loop k acc) in
+  loop k init
+
+let keys t = fold_keys t ~init:[] ~f:(fun k acc -> Lwt.return (k :: acc))
+
+let remove_dir s k =
+  let rec loop k =
+    fold s k ~init:()
+      ~f:(fun file () ->
+          match file with
+          | `Key k -> remove s k
+          | `Dir k -> loop k) in
+  loop k
|
|||||||
(* *)
|
(* *)
|
||||||
(**************************************************************************)
|
(**************************************************************************)
|
||||||
|
|
||||||
type worker
|
open Store_sigs
|
||||||
|
|
||||||
val create_worker: Tezos_p2p.net -> State.t -> worker
|
include STORE
|
||||||
|
|
||||||
|
val init: string -> t tzresult Lwt.t
|
||||||
|
|
||||||
val shutdown: worker -> unit Lwt.t
|
|
1024	src/node/db/store.ml  (diff not shown: file too large)
@@ -7,223 +7,221 @@
 (*                                                                        *)
 (**************************************************************************)
 
-(** Tezos - Simple imperative (key x value) store *)
+open Store_sigs
 
-type key = string list
-type value = MBytes.t
+type t
+type global_store = t
 
-module type TYPED_IMPERATIVE_STORE = sig
-  type t
-  type key
-  type value
-  val mem: t -> key -> bool Lwt.t
-  val get: t -> key -> value option Lwt.t
-  val get_exn: t -> key -> value Lwt.t
-  val set: t -> key -> value -> unit Lwt.t
-  val del: t -> key -> unit Lwt.t
-  val keys: t -> key list Lwt.t
-end
-
-module type IMPERATIVE_STORE = sig
-  type t
-  val mem: t -> key -> bool Lwt.t
-  val dir_mem: t -> key -> bool Lwt.t
-  val get: t -> key -> value option Lwt.t
-  val get_exn: t -> key -> value Lwt.t
-  val set: t -> key -> value -> unit Lwt.t
-  val del: t -> key -> unit Lwt.t
-  val list: t -> key list -> key list Lwt.t
-  val remove_rec: t -> key -> unit Lwt.t
-end
-
-(** A generic (key x value) store. *)
-type generic_store
-type block_store
-type blockchain_store
-type operation_store
-type protocol_store
-
-type store = private {
-  block: block_store Persist.shared_ref ;
-  blockchain: blockchain_store Persist.shared_ref ;
-  operation: operation_store Persist.shared_ref ;
-  protocol: protocol_store Persist.shared_ref ;
-  global_store: generic_store Persist.shared_ref ;
-  net_init: ?expiration:Time.t -> genesis -> net_store Lwt.t ;
-  net_read: net_id -> net_store tzresult Lwt.t ;
-  net_destroy: net_store -> unit Lwt.t ;
-}
-
-and net_store = private {
-  net_genesis: genesis ;
-  net_expiration: Time.t option ;
-  net_store: generic_store Persist.shared_ref ;
-}
-
-and genesis = {
-  time: Time.t ;
-  block: Block_hash.t ;
-  protocol: Protocol_hash.t ;
-}
-
-and net_id = Net of Block_hash.t
-
-val net_id_encoding: net_id Data_encoding.t
-val pp_net_id: Format.formatter -> net_id -> unit
-
 (** Open or initialize a store at a given path. *)
-val init: string -> store Lwt.t
+val init: string -> t tzresult Lwt.t
 
-(** Lwt exn returned when function keys is not implemented *)
-val undefined_key_fn : 'a Lwt.t
-
-(** {2 Generic interface} ****************************************************)
-
-(** The generic primitives do work on the direct root, but in a
-    "data/" subdirectory and do not colide with following block and
-    operation specific functions. *)
-include IMPERATIVE_STORE with type t = generic_store
-
-(** {2 Types} ****************************************************************)
-
-(** Raw operations in the database (partially parsed).
-    See [State.Operation.t] for detailled description. *)
-type shell_operation = {
-  net_id: net_id ;
-}
-type operation = {
-  shell: shell_operation ;
-  proto: MBytes.t ;
-}
-
-val shell_operation_encoding: shell_operation Data_encoding.t
-val operation_encoding: operation Data_encoding.t
-
-(** Raw blocks in the database (partially parsed). *)
-type shell_block = {
-  net_id: net_id ;
-  predecessor: Block_hash.t ;
-  timestamp: Time.t ;
-  fitness: MBytes.t list ;
-  operations: Operation_hash.t list ;
-}
-type block = {
-  shell: shell_block ;
-  proto: MBytes.t ;
-}
-val shell_block_encoding: shell_block Data_encoding.t
-val block_encoding: block Data_encoding.t
-
-type protocol = Tezos_compiler.Protocol.t
-val protocol_encoding: protocol Data_encoding.t
-
-(** {2 Block and operations store} ********************************************)
-
-module Block : sig
-
-  val of_bytes: MBytes.t -> block option
-  val to_bytes: block -> MBytes.t
-  val hash: block -> Block_hash.t
-
-  include TYPED_IMPERATIVE_STORE
-    with type t = block_store
-     and type key = Block_hash.t
-     and type value =
-           Block_hash.t * block Time.timed_data option Lwt.t Lazy.t
-
-  val compare: block -> block -> int
-  val equal: block -> block -> bool
-
-  val raw_get: t -> Block_hash.t -> MBytes.t option Lwt.t
-  val full_get: t -> Block_hash.t -> block Time.timed_data option Lwt.t
-  val full_set: t -> Block_hash.t -> block Time.timed_data -> unit Lwt.t
-
-end
-
-module Block_valid_succs : TYPED_IMPERATIVE_STORE
-  with type t = generic_store
-   and type key = Block_hash.t
-   and type value = Block_hash_set.t
-
-module Block_invalid_succs : TYPED_IMPERATIVE_STORE
-  with type t = generic_store
-   and type key = Block_hash.t
-   and type value = Block_hash_set.t
-
-module Blockchain : TYPED_IMPERATIVE_STORE
-  with type t = blockchain_store
-   and type key = Block_hash.t
-   and type value = Time.t
-
-module Blockchain_succ : TYPED_IMPERATIVE_STORE
-  with type t = blockchain_store
-   and type key = Block_hash.t
-   and type value = Block_hash.t
-
-module Blockchain_test_succ : TYPED_IMPERATIVE_STORE
-  with type t = blockchain_store
-   and type key = Block_hash.t
-   and type value = Block_hash.t
-
+(** {2 Net store} ************************************************************)
+
+module Net_id : sig
+
+  type t = Id of Block_hash.t
+  type net_id = t
+  val encoding: net_id Data_encoding.t
+  val pp: Format.formatter -> net_id -> unit
+  val compare: net_id -> net_id -> int
+  val equal: net_id -> net_id -> bool
+
+  val of_bytes_exn: MBytes.t -> net_id
+  val to_bytes: net_id -> MBytes.t
+
+  module Set : Set.S with type elt = t
+  module Map : Map.S with type key = t
+  module Table : Hashtbl.S with type key = t
+
+end
+
+module Net : sig
+
+  val list: global_store -> Net_id.t list Lwt.t
+  val destroy: global_store -> Net_id.t -> unit Lwt.t
+
+  type store
+  val get: global_store -> Net_id.t -> store
+
+  module Genesis_time : SINGLE_STORE
+    with type t := store
+     and type value := Time.t
+
+  module Genesis_protocol : SINGLE_STORE
+    with type t := store
+     and type value := Protocol_hash.t
+
+  module Genesis_test_protocol : SINGLE_STORE
+    with type t := store
+     and type value := Protocol_hash.t
+
+  module Expiration : SINGLE_STORE
+    with type t := store
+     and type value := Time.t
+
+  module Forked_network_ttl : SINGLE_STORE
+    with type t := store
+     and type value := Int64.t
+
+end
+
+(** {2 Chain data} ***********************************************************)
+
+module Chain : sig
+
+  type store
+  val get: Net.store -> store
+
+  module Current_head : SINGLE_STORE
+    with type t := store
+     and type value := Block_hash.t
+
+  module Known_heads : BUFFERED_SET_STORE
+    with type t := store
+     and type elt := Block_hash.t
+     and module Set := Block_hash.Set
+
+  module Valid_successors : BUFFERED_SET_STORE
+    with type t = store * Block_hash.t
+     and type elt := Block_hash.t
+     and module Set := Block_hash.Set
+
+  module Invalid_successors : BUFFERED_SET_STORE
+    with type t = store * Block_hash.t
+     and type elt := Block_hash.t
+     and module Set := Block_hash.Set
+
+  module Successor_in_chain : SINGLE_STORE
+    with type t = store * Block_hash.t
+     and type value := Block_hash.t
+
+  module In_chain_insertion_time : SINGLE_STORE
+    with type t = store * Block_hash.t
+     and type value := Time.t
+
+end
+
+(** {2 Generic signature} *****************************************************)
+
+(** Generic signature for Operations, Block_header, and Protocol "tracked"
+    contents (i.e. with 'discovery_time', 'validtity', ...) *)
+module type DATA_STORE = sig
+
+  type store
+  type key
+  type key_set
+  type value
+
+  val encoding: value Data_encoding.t
+
+  val compare: value -> value -> int
+  val equal: value -> value -> bool
+
+  val hash: value -> key
+  val hash_raw: MBytes.t -> key
+
+  module Discovery_time : MAP_STORE
+    with type t := store
+     and type key := key
+     and type value := Time.t
+
+  module Contents : SINGLE_STORE
+    with type t = store * key
+     and type value := value
+
+  module RawContents : SINGLE_STORE
+    with type t = store * key
+     and type value := MBytes.t
+
+  module Validation_time : SINGLE_STORE
+    with type t = store * key
+     and type value := Time.t
+
+  module Errors : MAP_STORE
+    with type t := store
+     and type key := key
+     and type value = error list
+
+  module Pending : BUFFERED_SET_STORE
+    with type t = store
+     and type elt := key
+     and type Set.t = key_set
+
+end
+
+(** {2 Operation store} *****************************************************)
+
 module Operation : sig
 
-  val of_bytes: MBytes.t -> operation option
-  val to_bytes: operation -> MBytes.t
-
-  (** Computes the hash of a raw operation
-      (including both abstract and parsed parts) *)
-  val hash: operation -> Operation_hash.t
-
-  include TYPED_IMPERATIVE_STORE
-    with type t = operation_store
-     and type key = Operation_hash.t
-     and type value = operation tzresult Time.timed_data
-
-  val compare: operation -> operation -> int
-  val equal: operation -> operation -> bool
-
-  val raw_get: t -> Operation_hash.t -> MBytes.t option Lwt.t
-
+  type shell_header = {
+    net_id: Net_id.t ;
+  }
+  val shell_header_encoding: shell_header Data_encoding.t
+
+  type t = {
+    shell: shell_header ;
+    proto: MBytes.t ;
+  }
+
+  type store
+  val get: Net.store -> store
+
+  include DATA_STORE
+    with type store := store
+     and type key = Operation_hash.t
+     and type value = t
+     and type key_set = Operation_hash.Set.t
+
 end
 
+(** {2 Block header store} **************************************************)
+
+module Block_header : sig
+
+  type shell_header = {
+    net_id: Net_id.t ;
+    predecessor: Block_hash.t ;
+    timestamp: Time.t ;
+    fitness: MBytes.t list ;
+    operations: Operation_hash.t list ;
+  }
+  val shell_header_encoding: shell_header Data_encoding.t
+
+  type t = {
+    shell: shell_header ;
+    proto: MBytes.t ;
+  }
+
+  type store
+  val get: Net.store -> store
+
+  include DATA_STORE
+    with type store := store
+     and type key = Block_hash.t
+     and type value = t
+     and type key_set = Block_hash.Set.t
+
+end
+
+(** {2 Protocol store} ******************************************************)
+
 module Protocol : sig
 
-  val of_bytes: MBytes.t -> Tezos_compiler.Protocol.t option
-  val to_bytes: Tezos_compiler.Protocol.t -> MBytes.t
-  val hash: Tezos_compiler.Protocol.t -> Protocol_hash.t
-
-  include TYPED_IMPERATIVE_STORE
-    with type t = protocol_store
-     and type key = Protocol_hash.t
-     and type value = Tezos_compiler.Protocol.t tzresult Time.timed_data
-
-  val raw_get: t -> Protocol_hash.t -> MBytes.t option Lwt.t
-
+  type t = Tezos_compiler.Protocol.t
+
+  type store
+  val get: global_store -> store
+
+  include DATA_STORE
+    with type store := store
+     and type key = Protocol_hash.t
+     and type value = t
+     and type key_set = Protocol_hash.Set.t
+
 end
-
-(**/**) (* For testing only *)
-
-(* module LwtUnixStore : sig *)
-(*   include Persist.STORE with type t = generic_store *)
-(*   val init : string -> t Lwt.t *)
-(* end *)
-
-module IrminPath = Irmin.Path.String_list
-module MBytesContent : Irmin.Contents.S with type t = MBytes.t
-                                          and module Path = IrminPath
-
-module Faked_functional_operation :
-  Persist.TYPED_STORE with type t = Operation.t
-                       and type value = Operation.value
-                       and type key = Operation.key
-
-module Faked_functional_block :
-  Persist.TYPED_STORE with type t = Block.t
-                       and type value = Block.value
-                       and type key = Block.key
-
-module Faked_functional_protocol :
-  Persist.TYPED_STORE with type t = Protocol.t
-                       and type value = Protocol.value
-                       and type key = Protocol.key
-
-module Faked_functional_store : Persist.STORE with type t = t
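Every tracked kind (operations, block headers, protocols) now goes through the same `DATA_STORE` shape: raw contents and per-key metadata live in sub-stores addressed by the hash. A hedged sketch of how a block-header lookup might read against the instance above (the `net_store` and `hash` bindings are assumptions, not code from the diff):

(* Sketch against the Block_header instance of DATA_STORE above. *)
let read_header_with_time net_store hash =
  let store = Store.Block_header.get net_store in
  Store.Block_header.Contents.read_opt (store, hash) >>= function
  | None -> Lwt.return_none
  | Some header ->
      Store.Block_header.Discovery_time.read_opt store hash >>= fun discovered ->
      Lwt.return (Some (header, discovered))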
357	src/node/db/store_helpers.ml  (new file)
@@ -0,0 +1,357 @@
|
|||||||
|
(**************************************************************************)
|
||||||
|
(* *)
|
||||||
|
(* Copyright (c) 2014 - 2016. *)
|
||||||
|
(* Dynamic Ledger Solutions, Inc. <contact@tezos.com> *)
|
||||||
|
(* *)
|
||||||
|
(* All rights reserved. No warranty, explicit or implicit, provided. *)
|
||||||
|
(* *)
|
||||||
|
(**************************************************************************)
|
||||||
|
|
||||||
|
open Store_sigs
|
||||||
|
|
||||||
|
module Make_value (V : ENCODED_VALUE) = struct
|
||||||
|
type t = V.t
|
||||||
|
let of_bytes b =
|
||||||
|
match Data_encoding.Binary.of_bytes V.encoding b with
|
||||||
|
| None -> generic_error "Cannot parse data" (* TODO personalize *)
|
||||||
|
| Some v -> ok v
|
||||||
|
let to_bytes = Data_encoding.Binary.to_bytes V.encoding
|
||||||
|
end
|
||||||
|
|
||||||
|
module Make_single_store (S : STORE) (N : NAME) (V : VALUE) = struct
|
||||||
|
type t = S.t
|
||||||
|
type value = V.t
|
||||||
|
let known t = S.known t N.name
|
||||||
|
let read t = S.read t N.name >>=? fun b -> Lwt.return (V.of_bytes b)
|
||||||
|
let read_opt t =
|
||||||
|
read t >|= function
|
||||||
|
| Error _ -> None
|
||||||
|
| Ok v -> Some v
|
||||||
|
let read_exn t =
|
||||||
|
read t >>= function
|
||||||
|
| Error _ -> Lwt.fail Not_found
|
||||||
|
| Ok v -> Lwt.return v
|
||||||
|
let store t v = S.store t N.name (V.to_bytes v)
|
||||||
|
let remove t = S.remove t N.name
|
||||||
|
end
|
||||||
|
|
||||||
|
let map_key f = function
|
||||||
|
|`Key k -> `Key (f k)
|
||||||
|
| `Dir k -> `Dir (f k)
|
||||||
|
|
||||||
|
module Make_substore (S : STORE) (N : NAME)
|
||||||
|
: STORE with type t = S.t = struct
|
||||||
|
type t = S.t
|
||||||
|
type key = string list
|
||||||
|
type value = MBytes.t
|
||||||
|
let name_length = List.length N.name
|
||||||
|
let to_key k = N.name @ k
|
||||||
|
let of_key k = Utils.remove_elem_from_list name_length k
|
||||||
|
let known t k = S.known t (to_key k)
|
||||||
|
let known_dir t k = S.known_dir t (to_key k)
|
||||||
|
let read t k = S.read t (to_key k)
|
||||||
|
let read_opt t k = S.read_opt t (to_key k)
|
||||||
|
let read_exn t k = S.read_exn t (to_key k)
|
||||||
|
let store t k v = S.store t (to_key k) v
|
||||||
|
let remove t k = S.remove t (to_key k)
|
||||||
|
let fold t k ~init ~f =
|
||||||
|
S.fold t (to_key k) ~init
|
||||||
|
~f:(fun k acc -> f (map_key of_key k) acc)
|
||||||
|
let keys t k = S.keys t (to_key k) >|= fun keys -> List.map of_key keys
|
||||||
|
let fold_keys t k ~init ~f =
|
||||||
|
S.fold_keys t (to_key k) ~init ~f:(fun k acc -> f (of_key k) acc)
|
||||||
|
let remove_dir t k = S.remove_dir t (to_key k)
|
||||||
|
end
|
||||||
|
|
||||||
|
module Make_indexed_substore (S : STORE) (I : INDEX) = struct
|
||||||
|
|
||||||
|
type t = S.t
|
||||||
|
type key = I.t
|
||||||
|
|
||||||
|
module Store = struct
|
||||||
|
type t = S.t * I.t
|
||||||
|
type key = string list
|
||||||
|
type value = MBytes.t
|
||||||
|
let to_key i k =
|
||||||
|
assert (List.length (I.to_path i) = I.path_length) ;
|
||||||
|
I.to_path i @ k
|
||||||
|
let of_key k = Utils.remove_elem_from_list I.path_length k
|
||||||
|
let known (t,i) k = S.known t (to_key i k)
|
||||||
|
let known_dir (t,i) k = S.known_dir t (to_key i k)
|
||||||
|
let read (t,i) k = S.read t (to_key i k)
|
||||||
|
let read_opt (t,i) k = S.read_opt t (to_key i k)
|
||||||
|
let read_exn (t,i) k = S.read_exn t (to_key i k)
|
||||||
|
let store (t,i) k v = S.store t (to_key i k) v
|
||||||
|
let remove (t,i) k = S.remove t (to_key i k)
|
||||||
|
let fold (t,i) k ~init ~f =
|
||||||
|
S.fold t (to_key i k) ~init
|
||||||
|
~f:(fun k acc -> f (map_key of_key k) acc)
|
||||||
|
let keys (t,i) k = S.keys t (to_key i k) >|= fun keys -> List.map of_key keys
|
||||||
|
let fold_keys (t,i) k ~init ~f =
|
||||||
|
S.fold_keys t (to_key i k) ~init ~f:(fun k acc -> f (of_key k) acc)
|
||||||
|
let remove_dir (t,i) k = S.remove_dir t (to_key i k)
|
||||||
|
end
|
||||||
|
|
||||||
|
let remove_all t i = Store.remove_dir (t, i) []
|
||||||
|
|
||||||
|
let fold_indexes t ~init ~f =
|
||||||
|
let rec dig i path acc =
|
||||||
|
if i <= 0 then
|
||||||
|
match I.of_path path with
|
||||||
|
| None -> assert false
|
||||||
|
| Some path -> f path acc
|
||||||
|
else
|
||||||
|
S.fold t path ~init:acc ~f:begin fun k acc ->
|
||||||
|
match k with
|
||||||
|
| `Dir k -> dig (i-1) k acc
|
||||||
|
| `Key _ -> Lwt.return acc
|
||||||
|
end in
|
||||||
|
dig I.path_length [] init
|
||||||
|
|
||||||
|
let indexes t =
|
||||||
|
fold_indexes t ~init:[] ~f:(fun i acc -> Lwt.return (i :: acc))
|
||||||
|
|
||||||
|
let list t k = S.fold t k ~init:[] ~f:(fun k acc -> Lwt.return (k :: acc))
|
||||||
|
let resolve_index t prefix =
|
||||||
|
let rec loop i prefix = function
|
||||||
|
| [] when i >= I.path_length -> begin
|
||||||
|
match I.of_path prefix with
|
||||||
|
| None -> assert false
|
||||||
|
| Some path -> Lwt.return [path]
|
||||||
|
end
|
||||||
|
| [] ->
|
||||||
|
list t prefix >>= fun prefixes ->
|
||||||
|
Lwt_list.map_p (function
|
||||||
|
| `Key prefix | `Dir prefix -> loop (i+1) prefix []) prefixes
|
||||||
|
>|= List.flatten
|
||||||
|
| [d] ->
|
||||||
|
if (i >= I.path_length) then invalid_arg "IO.resolve" ;
|
||||||
|
list t prefix >>= fun prefixes ->
|
||||||
|
Lwt_list.map_p (function
|
||||||
|
| `Key prefix | `Dir prefix ->
|
||||||
|
match Utils.remove_prefix d (List.hd (List.rev prefix)) with
|
||||||
|
| None -> Lwt.return_nil
|
||||||
|
| Some _ -> loop (i+1) prefix [])
|
||||||
|
prefixes
|
||||||
|
>|= List.flatten
|
||||||
|
| d :: ds ->
|
||||||
|
if (i >= I.path_length) then invalid_arg "IO.resolve" ;
|
||||||
|
S.known_dir t (prefix @ [d]) >>= function
|
||||||
|
| true -> loop (i+1) (prefix @ [d]) ds
|
||||||
|
| false -> Lwt.return_nil in
|
||||||
|
loop 0 [] prefix
|
||||||
|
|
||||||
|
module Make_set (N : NAME) = struct
|
||||||
|
type t = S.t
|
||||||
|
type elt = I.t
|
||||||
|
let inited = MBytes.of_string "inited"
|
||||||
|
let known s i = Store.known (s, i) N.name
|
||||||
|
let store s i = Store.store (s, i) N.name inited
|
||||||
|
let remove s i = Store.remove (s, i) N.name
|
||||||
|
let remove_all s =
|
||||||
|
fold_indexes s ~init:() ~f:(fun i () -> remove s i)
|
||||||
|
let fold s ~init ~f =
|
||||||
|
fold_indexes s ~init
|
||||||
|
~f:(fun i acc ->
|
||||||
|
known s i >>= function
|
||||||
|
| true -> f i acc
|
||||||
|
| false -> Lwt.return acc)
|
||||||
|
let elements s =
|
||||||
|
fold s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc))
|
||||||
|
let iter s ~f =
|
||||||
|
fold s ~init:() ~f:(fun p () -> f p)
|
||||||
|
end
|
||||||
|
|
||||||
|
module Make_buffered_set (N : NAME) (Set : Set.S with type elt = I.t) = struct
|
||||||
|
include Make_set (N)
|
||||||
|
module Set = Set
|
||||||
|
let read_all s =
|
||||||
|
fold s ~init:Set.empty ~f:(fun i set -> Lwt.return (Set.add i set))
|
||||||
|
let store_all s new_set =
|
||||||
|
read_all s >>= fun old_set ->
|
||||||
|
Lwt_list.iter_p (remove s)
|
||||||
|
Set.(elements (diff old_set new_set)) >>= fun () ->
|
||||||
|
Lwt_list.iter_p (store s) Set.(elements (diff new_set old_set))
|
||||||
|
end
|
||||||
|
|
||||||
|
module Make_map (N : NAME) (V : VALUE) = struct
|
||||||
|
type t = S.t
|
||||||
|
type key = I.t
|
||||||
|
type value = V.t
|
||||||
|
let known s i = Store.known (s,i) N.name
|
||||||
|
let read s i =
|
||||||
|
Store.read (s,i) N.name >>=? fun b -> Lwt.return (V.of_bytes b)
|
||||||
|
let read_opt s i =
|
||||||
|
read s i >>= function
|
||||||
|
| Error _ -> Lwt.return_none
|
||||||
|
| Ok v -> Lwt.return (Some v)
|
||||||
|
let read_exn s i =
|
||||||
|
read s i >>= function
|
||||||
|
| Error _ -> Lwt.fail Not_found
|
||||||
|
| Ok v -> Lwt.return v
|
||||||
|
let store s i v = Store.store (s,i) N.name (V.to_bytes v)
|
||||||
|
let remove s i = Store.remove (s,i) N.name
|
||||||
|
let remove_all s = fold_indexes s ~init:() ~f:(fun i () -> remove s i)
|
||||||
|
let fold s ~init ~f =
|
||||||
|
fold_indexes s ~init
|
||||||
|
~f:(fun i acc ->
|
||||||
|
read_opt s i >>= function
|
||||||
|
| None -> Lwt.return acc
|
||||||
|
| Some v -> f i v acc)
|
||||||
|
let bindings s =
|
||||||
|
fold s ~init:[] ~f:(fun p v acc -> Lwt.return ((p,v) :: acc))
|
||||||
|
let iter s ~f =
|
||||||
|
fold s ~init:() ~f:(fun p v () -> f p v)
|
||||||
|
let fold_keys s ~init ~f =
|
||||||
|
fold_indexes s ~init
|
||||||
|
~f:(fun i acc ->
|
||||||
|
known s i >>= function
|
||||||
|
| false -> Lwt.return acc
|
||||||
|
| true -> f i acc)
|
||||||
|
let keys s =
|
||||||
|
fold_keys s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc))
|
||||||
|
let iter_keys s ~f =
|
||||||
|
fold_keys s ~init:() ~f:(fun p () -> f p)
|
||||||
|
end
|
||||||
|
|
||||||
|
module Make_buffered_map
|
||||||
|
(N : NAME) (V : VALUE)
|
||||||
|
(Map : Map.S with type key = I.t) = struct
|
||||||
|
include Make_map (N) (V)
|
||||||
|
module Map = Map
|
||||||
|
let read_all s =
|
||||||
|
fold s ~init:Map.empty ~f:(fun i v set -> Lwt.return (Map.add i v set))
|
||||||
|
let store_all s map =
|
||||||
|
remove_all s >>= fun () ->
|
||||||
|
Map.fold
|
||||||
|
(fun k v acc -> let res = store s k v in acc >>= fun () -> res)
|
||||||
|
map Lwt.return_unit
|
||||||
|
end
|
||||||
|
|
||||||
|
end
|
||||||
|
|
||||||
|
module Make_set (S : STORE) (I : INDEX) = struct
|
||||||
|
type t = S.t
|
||||||
|
type elt = I.t
|
||||||
|
let inited = MBytes.of_string "inited"
|
||||||
|
let known s i = S.known s (I.to_path i)
|
||||||
|
let store s i = S.store s (I.to_path i) inited
|
||||||
|
let remove s i = S.remove s (I.to_path i)
|
||||||
|
let remove_all s = S.remove_dir s []
|
||||||
|
|
||||||
|
let fold s ~init ~f =
|
||||||
|
let rec dig i path acc =
|
||||||
|
if i <= 1 then
|
||||||
|
S.fold s path ~init:acc ~f:begin fun k acc ->
|
||||||
|
match k with
|
||||||
|
| `Dir _ -> Lwt.return acc
|
||||||
|
| `Key file ->
|
||||||
|
match I.of_path file with
|
||||||
|
| None -> assert false
|
||||||
|
| Some p -> f p acc
|
||||||
|
end
|
||||||
|
else
|
||||||
|
S.fold s path ~init:acc ~f:begin fun k acc ->
|
||||||
|
match k with
|
||||||
|
| `Dir k ->
|
||||||
|
dig (i-1) k acc
|
||||||
|
| `Key _ ->
|
||||||
|
Lwt.return acc
|
||||||
|
end in
|
||||||
|
dig I.path_length [] init
|
||||||
|
|
||||||
|
let elements s =
|
||||||
|
fold s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc))
|
||||||
|
let iter s ~f =
|
||||||
|
fold s ~init:() ~f:(fun p () -> f p)
|
||||||
|
end
|
||||||
|
|
||||||
|
module Make_buffered_set
|
||||||
|
(S : STORE) (I : INDEX) (Set : Set.S with type elt = I.t) = struct
|
||||||
|
include Make_set (S) (I)
|
||||||
|
module Set = Set
|
||||||
|
let read_all s =
|
||||||
|
fold s ~init:Set.empty ~f:(fun i set -> Lwt.return (Set.add i set))
|
||||||
|
let store_all s new_set =
|
||||||
|
read_all s >>= fun old_set ->
|
||||||
|
Lwt_list.iter_p (remove s) Set.(elements (diff old_set new_set)) >>= fun () ->
|
||||||
|
Lwt_list.iter_p (store s) Set.(elements (diff new_set old_set))
|
||||||
|
end
|
||||||
|
|
||||||
|
module Make_map (S : STORE) (I : INDEX) (V : VALUE) = struct
|
||||||
|
type t = S.t
|
||||||
|
type key = I.t
|
||||||
|
type value = V.t
|
||||||
|
let known s i = S.known s (I.to_path i)
|
||||||
|
let read s i =
|
||||||
|
S.read s (I.to_path i) >>=? fun b -> Lwt.return (V.of_bytes b)
|
||||||
|
let read_opt s i =
|
||||||
|
read s i >>= function
|
||||||
|
| Error _ -> Lwt.return_none
|
||||||
|
| Ok v -> Lwt.return (Some v)
|
||||||
|
let read_exn s i =
|
||||||
|
read s i >>= function
|
||||||
|
| Error _ -> Lwt.fail Not_found
|
||||||
|
| Ok v -> Lwt.return v
|
||||||
|
let store s i v = S.store s (I.to_path i) (V.to_bytes v)
|
||||||
|
let remove s i = S.remove s (I.to_path i)
|
||||||
|
let remove_all s = S.remove_dir s []
|
||||||
|
let fold s ~init ~f =
|
||||||
|
let rec dig i path acc =
|
||||||
|
if i <= 1 then
|
||||||
|
S.fold s path ~init:acc ~f:begin fun k acc ->
|
||||||
|
match k with
|
||||||
|
| `Dir _ -> Lwt.return acc
|
||||||
|
| `Key file ->
|
||||||
|
S.read_opt s file >>= function
|
||||||
|
| None -> Lwt.return acc
|
||||||
|
| Some b ->
|
||||||
|
match V.of_bytes b with
|
||||||
|
| Error _ ->
|
||||||
|
(* Silently ignore unparsable data *)
|
||||||
|
Lwt.return acc
|
||||||
|
| Ok v ->
|
||||||
|
match I.of_path file with
|
||||||
|
| None -> assert false
|
||||||
|
| Some path -> f path v acc
|
||||||
|
end
|
||||||
|
else
|
||||||
|
S.fold s path ~init:acc ~f:begin fun k acc ->
|
||||||
|
match k with
|
||||||
|
| `Dir k -> dig (i-1) k acc
|
||||||
|
| `Key _ -> Lwt.return acc
|
||||||
|
end in
|
||||||
|
dig I.path_length [] init
|
||||||
|
|
||||||
|
let bindings s =
|
||||||
|
fold s ~init:[] ~f:(fun p v acc -> Lwt.return ((p,v) :: acc))
|
||||||
|
let iter s ~f =
|
||||||
|
fold s ~init:() ~f:(fun p v () -> f p v)
|
||||||
|
let fold_keys s ~init ~f =
|
||||||
|
S.fold s [] ~init
|
||||||
|
~f:(fun p acc ->
|
||||||
|
match p with
|
||||||
|
| `Dir _ -> Lwt.return acc
|
||||||
|
| `Key p ->
|
||||||
|
match I.of_path p with
|
||||||
|
| None -> assert false
|
||||||
|
| Some path -> f path acc)
|
||||||
|
let keys s =
|
||||||
|
fold_keys s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc))
|
||||||
|
let iter_keys s ~f =
|
||||||
|
fold_keys s ~init:() ~f:(fun p () -> f p)
|
||||||
|
end
|
||||||
|
|
||||||
|
module Make_buffered_map
|
||||||
|
(S : STORE) (I : INDEX) (V : VALUE)
|
||||||
|
(Map : Map.S with type key = I.t) = struct
|
||||||
|
include Make_map (S) (I) (V)
|
||||||
|
module Map = Map
|
||||||
|
let read_all s =
|
||||||
|
fold s ~init:Map.empty ~f:(fun i v set -> Lwt.return (Map.add i v set))
|
||||||
|
let store_all s map =
|
||||||
|
remove_all s >>= fun () ->
|
||||||
|
Map.fold
|
||||||
|
(fun k v acc -> let res = store s k v in acc >>= fun () -> res)
|
||||||
|
map Lwt.return_unit
|
||||||
|
end
|
45	src/node/db/store_helpers.mli  (new file)
@@ -0,0 +1,45 @@
+(**************************************************************************)
+(*                                                                        *)
+(*    Copyright (c) 2014 - 2016.                                          *)
+(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
+(*                                                                        *)
+(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
+(*                                                                        *)
+(**************************************************************************)
+
+open Store_sigs
+
+module Make_value (V : ENCODED_VALUE) : VALUE with type t = V.t
+
+module Make_single_store (S : STORE) (N : NAME) (V : VALUE)
+  : SINGLE_STORE with type t = S.t
+                  and type value = V.t
+
+module Make_substore (S : STORE) (N : NAME)
+  : STORE with type t = S.t
+
+module Make_set (S : STORE) (I : INDEX)
+  : SET_STORE with type t = S.t and type elt = I.t
+
+module Make_buffered_set
+    (S : STORE) (I : INDEX) (Set : Set.S with type elt = I.t)
+  : BUFFERED_SET_STORE with type t = S.t
+                        and type elt = I.t
+                        and module Set = Set
+
+module Make_map
+    (S : STORE) (I : INDEX) (V : VALUE)
+  : MAP_STORE with type t = S.t
+               and type key = I.t
+               and type value = V.t
+
+module Make_buffered_map
+    (S : STORE) (I : INDEX) (V : VALUE) (Map : Map.S with type key = I.t)
+  : BUFFERED_MAP_STORE with type t = S.t
+                        and type key = I.t
+                        and type value = V.t
+                        and module Map = Map
+
+module Make_indexed_substore (S : STORE) (I : INDEX)
+  : INDEXED_STORE with type t = S.t
+                   and type key = I.t
149	src/node/db/store_sigs.ml  (new file)
@@ -0,0 +1,149 @@
+(**************************************************************************)
+(*                                                                        *)
+(*    Copyright (c) 2014 - 2016.                                          *)
+(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
+(*                                                                        *)
+(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
+(*                                                                        *)
+(**************************************************************************)
+
+module type NAME = sig
+  val name : string list
+end
+
+module type VALUE = sig
+  type t
+  val of_bytes: MBytes.t -> t tzresult
+  val to_bytes: t -> MBytes.t
+end
+
+module type ENCODED_VALUE = sig
+  type t
+  val encoding: t Data_encoding.t
+end
+
+module type INDEX = sig
+  type t
+  val path_length: int
+  val to_path: t -> string list
+  val of_path: string list -> t option
+end
+
+module type SINGLE_STORE = sig
+  type t
+  type value
+  val known: t -> bool Lwt.t
+  val read: t -> value tzresult Lwt.t
+  val read_opt: t -> value option Lwt.t
+  val read_exn: t -> value Lwt.t
+  val store: t -> value -> unit Lwt.t
+  val remove: t -> unit Lwt.t
+end
+
+module type STORE = sig
+
+  type t
+  type key = string list
+  type value = MBytes.t
+
+  val known: t -> key -> bool Lwt.t
+  val read: t -> key -> value tzresult Lwt.t
+  val read_opt: t -> key -> value option Lwt.t
+  val read_exn: t -> key -> value Lwt.t
+  val store: t -> key -> value -> unit Lwt.t
+  val remove: t -> key -> unit Lwt.t
+
+  val known_dir: t -> key -> bool Lwt.t
+  val remove_dir: t -> key -> unit Lwt.t
+
+  val fold:
+    t -> key -> init:'a ->
+    f:([ `Key of key | `Dir of key ] -> 'a -> 'a Lwt.t) ->
+    'a Lwt.t
+
+  val keys: t -> key -> key list Lwt.t
+  val fold_keys: t -> key -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t
+
+end
+
+module type SET_STORE = sig
+  type t
+  type elt
+  val known: t -> elt -> bool Lwt.t
+  val store: t -> elt -> unit Lwt.t
+  val remove: t -> elt -> unit Lwt.t
+  val elements: t -> elt list Lwt.t
+  val remove_all: t -> unit Lwt.t
+  val iter: t -> f:(elt -> unit Lwt.t) -> unit Lwt.t
+  val fold: t -> init:'a -> f:(elt -> 'a -> 'a Lwt.t) -> 'a Lwt.t
+end
+
+module type BUFFERED_SET_STORE = sig
+  include SET_STORE
+  module Set : Set.S with type elt = elt
+  val read_all: t -> Set.t Lwt.t
+  val store_all: t -> Set.t -> unit Lwt.t
+end
+
+module type MAP_STORE = sig
+  type t
+  type key
+  type value
+  val known: t -> key -> bool Lwt.t
+  val read: t -> key -> value tzresult Lwt.t
+  val read_opt: t -> key -> value option Lwt.t
+  val read_exn: t -> key -> value Lwt.t
+  val store: t -> key -> value -> unit Lwt.t
+  val remove: t -> key -> unit Lwt.t
+  val keys: t -> key list Lwt.t
+  val bindings: t -> (key * value) list Lwt.t
+  val remove_all: t -> unit Lwt.t
+  val iter: t -> f:(key -> value -> unit Lwt.t) -> unit Lwt.t
+  val iter_keys: t -> f:(key -> unit Lwt.t) -> unit Lwt.t
+  val fold: t -> init:'a -> f:(key -> value -> 'a -> 'a Lwt.t) -> 'a Lwt.t
+  val fold_keys: t -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t
+end
+
+module type BUFFERED_MAP_STORE = sig
+  include MAP_STORE
+  module Map : Map.S with type key = key
+  val read_all: t -> value Map.t Lwt.t
+  val store_all: t -> value Map.t -> unit Lwt.t
+end
+
+module type INDEXED_STORE = sig
+
+  type t
+  type key
+
+  module Store : STORE with type t = t * key
+
+  val remove_all: t -> key -> unit Lwt.t
+
+  val fold_indexes: t -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t
+  val indexes: t -> key list Lwt.t
+
+  val resolve_index: t -> string list -> key list Lwt.t
+
+  module Make_set (N : NAME)
+    : SET_STORE with type t = t
+                 and type elt = key
+
+  module Make_buffered_set (N : NAME) (Set : Set.S with type elt = key)
+    : BUFFERED_SET_STORE with type t = t
+                          and type elt = key
+                          and module Set = Set
+
+  module Make_map (N : NAME) (V : VALUE)
+    : MAP_STORE with type t = t
+                 and type key = key
+                 and type value = V.t
+
+  module Make_buffered_map
+      (N : NAME) (V : VALUE) (Map : Map.S with type key = key)
+    : BUFFERED_MAP_STORE with type t = t
+                          and type key = key
+                          and type value = V.t
+                          and module Map = Map
+
+end
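`Store_sigs` only fixes the signatures; concrete stores are built by applying the `Store_helpers` functors to a raw `STORE` plus an `INDEX` and a `VALUE`. A minimal, hypothetical instantiation for illustration (the one-segment index layout and the choice of `Raw_store` as the base are assumptions, not taken from this commit):

(* Hypothetical: a map from block hashes to timestamps on top of Raw_store,
   using Make_value / Make_map from store_helpers.mli. *)
module Block_hash_index = struct
  type t = Block_hash.t
  let path_length = 1
  let to_path h = [Block_hash.to_b58check h]
  let of_path = function
    | [s] -> (try Some (Block_hash.of_b58check s) with _ -> None)
    | _ -> None
end

module Time_value = Store_helpers.Make_value (struct
    type t = Time.t
    let encoding = Time.encoding    (* assumed to exist in the Time module *)
  end)

module Block_time =
  Store_helpers.Make_map (Raw_store) (Block_hash_index) (Time_value)

(* Block_time.store : Raw_store.t -> Block_hash.t -> Time.t -> unit Lwt.t
   Block_time.read  : Raw_store.t -> Block_hash.t -> Time.t tzresult Lwt.t *)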
@@ -9,8 +9,8 @@
 
 open Logging.Node.Main
 
-let genesis = {
-  Store.time =
+let genesis : State.Net.genesis = {
+  time =
     Time.of_notation_exn "2016-11-01T00:00:00Z" ;
   block =
     Block_hash.of_b58check
@ -246,31 +246,42 @@ module Real = struct

lwt_debug "message sent to %a"
Connection_info.pp
(P2p_connection_pool.connection_info conn) >>= fun () ->
Lwt.return_unit
return ()
| Error _ ->
| Error err ->
lwt_debug "error sending message from %a"
lwt_debug "error sending message from %a: %a"
Connection_info.pp
(P2p_connection_pool.connection_info conn) >>= fun () ->
(P2p_connection_pool.connection_info conn)
Lwt.fail End_of_file (* temporary *)
pp_print_error err >>= fun () ->
Lwt.return (Error err)

let try_send _net conn v =
match P2p_connection_pool.write_now conn v with
| Ok v ->
Lwt.ignore_result
debug "message trysent to %a"
(lwt_debug "message trysent to %a"
Connection_info.pp
(P2p_connection_pool.connection_info conn) ;
(P2p_connection_pool.connection_info conn)) ;
v
| Error _ ->
| Error err ->
Lwt.ignore_result
debug "error trysending message to %a@ %a"
(lwt_debug "error trysending message to %a"
Connection_info.pp
(P2p_connection_pool.connection_info conn)
(P2p_connection_pool.connection_info conn)) ;
pp_print_error err ;
false

let broadcast { pool } msg =
P2p_connection_pool.write_all pool msg ;
Lwt.ignore_result (lwt_debug "message broadcasted")
debug "message broadcasted"

let fold_connections { pool } ~init ~f =
P2p_connection_pool.fold_connections pool ~init ~f

let iter_connections { pool } f =
P2p_connection_pool.fold_connections pool
~init:()
~f:(fun gid conn () -> f gid conn)

let on_new_connection { pool } f =
P2p_connection_pool.on_new_connection pool f

let pool { pool } = pool
end
@ -308,10 +319,14 @@ type ('msg, 'meta) t = {

set_metadata : Peer_id.t -> 'meta -> unit ;
recv : ('msg, 'meta) connection -> 'msg tzresult Lwt.t ;
recv_any : unit -> (('msg, 'meta) connection * 'msg) Lwt.t ;
send : ('msg, 'meta) connection -> 'msg -> unit Lwt.t ;
send : ('msg, 'meta) connection -> 'msg -> unit tzresult Lwt.t ;
try_send : ('msg, 'meta) connection -> 'msg -> bool ;
broadcast : 'msg -> unit ;
pool : ('msg, 'meta) P2p_connection_pool.t option ;
fold_connections :
'a. init:'a -> f:(Peer_id.t -> ('msg, 'meta) connection -> 'a -> 'a) -> 'a ;
iter_connections : (Peer_id.t -> ('msg, 'meta) connection -> unit) -> unit ;
on_new_connection : (Peer_id.t -> ('msg, 'meta) connection -> unit) -> unit ;
}
type ('msg, 'meta) net = ('msg, 'meta) t

@ -335,6 +350,9 @@ let create ~config ~limits meta_cfg msg_cfg =

try_send = Real.try_send net ;
broadcast = Real.broadcast net ;
pool = Some net.pool ;
fold_connections = (fun ~init ~f -> Real.fold_connections net ~init ~f) ;
iter_connections = Real.iter_connections net ;
on_new_connection = Real.on_new_connection net ;
}

let faked_network = {

@ -351,8 +369,11 @@ let faked_network = {

set_metadata = (fun _ _ -> ()) ;
recv = (fun _ -> Lwt_utils.never_ending) ;
recv_any = (fun () -> Lwt_utils.never_ending) ;
send = (fun _ _ -> Lwt_utils.never_ending) ;
send = (fun _ _ -> fail P2p_connection_pool.Connection_closed) ;
try_send = (fun _ _ -> false) ;
fold_connections = (fun ~init ~f:_ -> init) ;
iter_connections = (fun _f -> ()) ;
on_new_connection = (fun _f -> ()) ;
broadcast = ignore ;
pool = None
}

@ -373,6 +394,9 @@ let recv_any net = net.recv_any ()

let send net = net.send
let try_send net = net.try_send
let broadcast net = net.broadcast
let fold_connections net = net.fold_connections
let iter_connections net = net.iter_connections
let on_new_connection net = net.on_new_connection

module Raw = struct
type 'a t = 'a P2p_connection_pool.Message.t =
@ -179,7 +179,7 @@ val recv_any :

(** [send net peer msg] is a thread that returns when [msg] has been
    successfully enqueued in the send queue. *)
val send :
  ('msg, 'meta) net -> ('msg, 'meta) connection -> 'msg -> unit Lwt.t
  ('msg, 'meta) net -> ('msg, 'meta) connection -> 'msg -> unit tzresult Lwt.t

(** [try_send net peer msg] is [true] if [msg] has been added to the
    send queue for [peer], [false] otherwise *)
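A hedged usage sketch (not from the patch): with [send] now reporting errors through [tzresult], a caller can first try the non-blocking path and only fall back to the queueing one. The helper name and fallback policy below are assumptions, and the error-monad [return] is assumed to be in scope as elsewhere in this code base.

(* Sketch: prefer the non-blocking path, fall back to the queueing send. *)
let send_or_queue net conn msg =
  if P2p.try_send net conn msg
  then return ()              (* enqueued immediately *)
  else P2p.send net conn msg  (* waits for room in the send queue *)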
@ -281,6 +281,18 @@ module RPC : sig

end

val fold_connections :
  ('msg, 'meta) net ->
  init:'a -> f:(Peer_id.t -> ('msg, 'meta) connection -> 'a -> 'a) -> 'a

val iter_connections :
  ('msg, 'meta) net ->
  (Peer_id.t -> ('msg, 'meta) connection -> unit) -> unit

val on_new_connection :
  ('msg, 'meta) net ->
  (Peer_id.t -> ('msg, 'meta) connection -> unit) -> unit
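A hedged sketch of the new traversal API above; the helper names are illustrative, and [P2p.Peer_id.pp] is assumed to be re-exported from P2p_types as elsewhere in this patch.

(* Sketch: count active connections and print each peer id. *)
let count_connections net =
  P2p.fold_connections net ~init:0 ~f:(fun _peer _conn n -> n + 1)

let log_peers net =
  P2p.iter_connections net
    (fun peer_id _conn -> Format.eprintf "peer %a@." P2p.Peer_id.pp peer_id)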
(**/**)
module Raw : sig
type 'a t =
@ -293,6 +293,8 @@ type ('msg, 'meta) t = {

encoding : 'msg Message.t Data_encoding.t ;
events : events ;
watcher : LogEvent.t Watcher.input ;
mutable new_connection_hook :
(Peer_id.t -> ('msg, 'meta) connection -> unit) list ;
}

@ -490,6 +492,7 @@ let create_connection pool conn id_point pi gi _version =

end ;
P2p_connection.close ~wait:conn.wait_close conn.conn
end ;
List.iter (fun f -> f peer_id conn) pool.new_connection_hook ;
if active_connections pool < pool.config.min_connections then begin
Lwt_condition.broadcast pool.events.too_few_connections () ;
LogEvent.too_few_connections pool.watcher ;
@ -525,7 +528,7 @@ let authenticate pool ?pi canceler fd point =

end ~on_error: begin fun err ->
(* Authentication incorrect! *)
(* TODO do something when the error is Not_enough_proof_of_work ?? *)
lwt_debug "authenticate: %a%s -> failed %a"
lwt_debug "@[authenticate: %a%s -> failed@ %a@]"
Point.pp point
(if incoming then " incoming" else "")
pp_print_error err >>= fun () ->

@ -786,6 +789,7 @@ module Peer_ids = struct

let fold_known pool ~init ~f =
Peer_id.Table.fold f pool.known_peer_ids init

let fold_connected pool ~init ~f =
Peer_id.Table.fold f pool.connected_peer_ids init

@ -866,6 +870,7 @@ let create config meta_config message_config io_sched =

encoding = Message.encoding message_config.encoding ;
events ;
watcher = Watcher.create_input () ;
new_connection_hook = [] ;
} in
List.iter (Points.set_trusted pool) config.trusted_points ;
Peer_info.File.load config.peers_file meta_config.encoding >>= function

@ -899,3 +904,6 @@ let destroy pool =

Point.Table.fold (fun _point canceler acc ->
Canceler.cancel canceler >>= fun () -> acc)
pool.incoming Lwt.return_unit

let on_new_connection pool f =
pool.new_connection_hook <- f :: pool.new_connection_hook
@ -257,6 +257,10 @@ val fold_connections:

f:(Peer_id.t -> ('msg, 'meta) connection -> 'a -> 'a) ->
'a

val on_new_connection:
('msg, 'meta) pool ->
(Peer_id.t -> ('msg, 'meta) connection -> unit) -> unit

(** {1 I/O on connections} *)

type error += Connection_closed
@ -101,13 +101,7 @@ module Stat = struct

(req "current_outflow" int31))
end

module Peer_id = struct
module Peer_id = Crypto_box.Public_key_hash
include Crypto_box.Public_key_hash
let pp = pp_short
module Map = Map.Make (Crypto_box.Public_key_hash)
module Set = Set.Make (Crypto_box.Public_key_hash)
module Table = Hash.Hash_table (Crypto_box.Public_key_hash)
end

(* public types *)
type addr = Ipaddr.V6.t

@ -33,6 +33,7 @@ module Peer_id : sig

val compare : t -> t -> int
val equal : t -> t -> bool
val pp : Format.formatter -> t -> unit
val pp_short : Format.formatter -> t -> unit
val encoding : t Data_encoding.t
module Map : Map.S with type key = t
module Set : Set.S with type elt = t
@ -1,46 +0,0 @@

(**************************************************************************)
(*                                                                        *)
(*    Copyright (c) 2014 - 2016.                                          *)
(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
(*                                                                        *)
(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
(*                                                                        *)
(**************************************************************************)

type worker = {
  shutdown: unit -> unit Lwt.t;
}

let create_worker p2p state =

  let cancelation, cancel, _on_cancel = Lwt_utils.canceler () in

  let broadcast m = Tezos_p2p.broadcast p2p m in

  let discovery_worker =
    let rec worker_loop () =
      let nets = State.Net.active state in
      Lwt_list.iter_p
        (fun net ->
           State.Net.Blockchain.head net >>= fun head ->
           State.Valid_block.block_locator state 50 head >>= fun locator ->
           broadcast Tezos_p2p.(Discover_blocks (State.Net.id net, locator)) ;
           broadcast Tezos_p2p.(Current_operations (State.Net.id net)) ;
           Lwt.return_unit)
        nets >>= fun () ->
      let timeout = 15. +. Random.float 15. in
      Lwt.pick [(Lwt_unix.sleep timeout >|= fun () -> `Process);
                (cancelation () >|= fun () -> `Cancel)] >>= function
      | `Cancel -> Lwt.return_unit
      | `Process ->
          worker_loop ()
    in
    Lwt_utils.worker "discoverer" ~run:worker_loop ~cancel in

  let shutdown () =
    cancel () >>= fun () -> discovery_worker in

  { shutdown;
  }

let shutdown t = t.shutdown ()
525
src/node/shell/distributed_db.ml
Normal file
525
src/node/shell/distributed_db.ml
Normal file
@ -0,0 +1,525 @@
|
|||||||
|
(**************************************************************************)
|
||||||
|
(* *)
|
||||||
|
(* Copyright (c) 2014 - 2016. *)
|
||||||
|
(* Dynamic Ledger Solutions, Inc. <contact@tezos.com> *)
|
||||||
|
(* *)
|
||||||
|
(* All rights reserved. No warranty, explicit or implicit, provided. *)
|
||||||
|
(* *)
|
||||||
|
(**************************************************************************)
|
||||||
|
|
||||||
|
module Net_id = State.Net_id
|
||||||
|
module Message = Distributed_db_message
|
||||||
|
module Metadata = Distributed_db_metadata
|
||||||
|
|
||||||
|
type p2p = (Message.t, Metadata.t) P2p.net
|
||||||
|
type connection = (Message.t, Metadata.t) P2p.connection
|
||||||
|
|
||||||
|
type 'a request_param = {
|
||||||
|
data: 'a ;
|
||||||
|
active: unit -> P2p.Peer_id.Set.t ;
|
||||||
|
send: P2p.Peer_id.t -> Message.t -> unit ;
|
||||||
|
}
|
||||||
|
|
||||||
|
module Make_raw
|
||||||
|
(Hash : HASH)
|
||||||
|
(Disk_table : State.DATA_STORE with type key := Hash.t)
|
||||||
|
(Memory_table : Hashtbl.S with type key := Hash.t)
|
||||||
|
(Request_message : sig
|
||||||
|
type param
|
||||||
|
val forge : param -> Hash.t list -> Message.t
|
||||||
|
end) = struct
|
||||||
|
|
||||||
|
type key = Hash.t
|
||||||
|
type value = Disk_table.value
|
||||||
|
type param = Disk_table.store
|
||||||
|
|
||||||
|
module Request = struct
|
||||||
|
type param = Request_message.param request_param
|
||||||
|
let active { active } = active ()
|
||||||
|
let send { data ; send } gid keys =
|
||||||
|
send gid (Request_message.forge data keys)
|
||||||
|
end
|
||||||
|
|
||||||
|
module Scheduler =
|
||||||
|
Distributed_db_functors.Make_request_scheduler
|
||||||
|
(Hash) (Memory_table) (Request)
|
||||||
|
module Table =
|
||||||
|
Distributed_db_functors.Make_table
|
||||||
|
(Hash) (Disk_table) (Memory_table) (Scheduler)
|
||||||
|
|
||||||
|
type t = {
|
||||||
|
scheduler: Scheduler.t ;
|
||||||
|
table: Table.t ;
|
||||||
|
}
|
||||||
|
|
||||||
|
let create ?global_input request_param param =
|
||||||
|
let scheduler = Scheduler.create request_param in
|
||||||
|
let table = Table.create ?global_input scheduler param in
|
||||||
|
{ scheduler ; table }
|
||||||
|
|
||||||
|
let shutdown { scheduler } =
|
||||||
|
Scheduler.shutdown scheduler
|
||||||
|
|
||||||
|
end
|
||||||
|
|
||||||
|
module Raw_operation =
|
||||||
|
Make_raw (Operation_hash) (State.Operation) (Operation_hash.Table) (struct
|
||||||
|
type param = Net_id.t
|
||||||
|
let forge net_id keys = Message.Get_operations (net_id, keys)
|
||||||
|
end)
|
||||||
|
|
||||||
|
module Raw_block_header =
|
||||||
|
Make_raw (Block_hash) (State.Block_header) (Block_hash.Table) (struct
|
||||||
|
type param = Net_id.t
|
||||||
|
let forge net_id keys = Message.Get_block_headers (net_id, keys)
|
||||||
|
end)
|
||||||
|
|
||||||
|
module Raw_protocol =
|
||||||
|
Make_raw (Protocol_hash) (State.Protocol) (Protocol_hash.Table) (struct
|
||||||
|
type param = unit
|
||||||
|
let forge () keys = Message.Get_protocols keys
|
||||||
|
end)
|
||||||
|
|
||||||
|
type callback = {
|
||||||
|
notify_branch: P2p.Peer_id.t -> Block_hash.t list -> unit ;
|
||||||
|
current_branch: int -> Block_hash.t list Lwt.t ;
|
||||||
|
notify_head: P2p.Peer_id.t -> Block_hash.t -> Operation_hash.t list -> unit ;
|
||||||
|
current_head: int -> (Block_hash.t * Operation_hash.t list) Lwt.t ;
|
||||||
|
disconnection: P2p.Peer_id.t -> unit ;
|
||||||
|
}
|
||||||
|
|
||||||
|
type db = {
|
||||||
|
p2p: p2p ;
|
||||||
|
p2p_readers: p2p_reader P2p.Peer_id.Table.t ;
|
||||||
|
disk: State.t ;
|
||||||
|
active_nets: net Net_id.Table.t ;
|
||||||
|
protocol_db: Raw_protocol.t ;
|
||||||
|
block_input: (Block_hash.t * Store.Block_header.t) Watcher.input ;
|
||||||
|
operation_input: (Operation_hash.t * Store.Operation.t) Watcher.input ;
|
||||||
|
}
|
||||||
|
|
||||||
|
and net = {
|
||||||
|
net: State.Net.t ;
|
||||||
|
global_db: db ;
|
||||||
|
operation_db: Raw_operation.t ;
|
||||||
|
block_header_db: Raw_block_header.t ;
|
||||||
|
callback: callback ;
|
||||||
|
active_peers: P2p.Peer_id.Set.t ref ;
|
||||||
|
active_connections: p2p_reader P2p.Peer_id.Table.t ;
|
||||||
|
}
|
||||||
|
|
||||||
|
and p2p_reader = {
|
||||||
|
gid: P2p.Peer_id.t ;
|
||||||
|
conn: connection ;
|
||||||
|
peer_active_nets: net Net_id.Table.t ;
|
||||||
|
canceler: Lwt_utils.Canceler.t ;
|
||||||
|
mutable worker: unit Lwt.t ;
|
||||||
|
}
|
||||||
|
|
||||||
|
type t = db
|
||||||
|
|
||||||
|
let state { net } = net
|
||||||
|
|
||||||
|
module P2p_reader = struct
|
||||||
|
|
||||||
|
type t = p2p_reader
|
||||||
|
|
||||||
|
let may_activate global_db state net_id f =
|
||||||
|
match Net_id.Table.find state.peer_active_nets net_id with
|
||||||
|
| net_db ->
|
||||||
|
f net_db
|
||||||
|
| exception Not_found ->
|
||||||
|
match Net_id.Table.find global_db.active_nets net_id with
|
||||||
|
| net_db ->
|
||||||
|
net_db.active_peers :=
|
||||||
|
P2p.Peer_id.Set.add state.gid !(net_db.active_peers) ;
|
||||||
|
P2p.Peer_id.Table.add net_db.active_connections
|
||||||
|
state.gid state ;
|
||||||
|
Net_id.Table.add state.peer_active_nets net_id net_db ;
|
||||||
|
f net_db
|
||||||
|
| exception Not_found ->
|
||||||
|
(* TODO decrease peer score. *)
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
let deactivate state net_db =
|
||||||
|
net_db.callback.disconnection state.gid ;
|
||||||
|
net_db.active_peers :=
|
||||||
|
P2p.Peer_id.Set.remove state.gid !(net_db.active_peers) ;
|
||||||
|
P2p.Peer_id.Table.remove net_db.active_connections state.gid
|
||||||
|
|
||||||
|
let may_handle state net_id f =
|
||||||
|
match Net_id.Table.find state.peer_active_nets net_id with
|
||||||
|
| exception Not_found ->
|
||||||
|
(* TODO decrease peer score *)
|
||||||
|
Lwt.return_unit
|
||||||
|
| net_db ->
|
||||||
|
f net_db
|
||||||
|
|
||||||
|
let may_handle_global global_db net_id f =
|
||||||
|
match Net_id.Table.find global_db.active_nets net_id with
|
||||||
|
| exception Not_found ->
|
||||||
|
Lwt.return_unit
|
||||||
|
| net_db ->
|
||||||
|
f net_db
|
||||||
|
|
||||||
|
let handle_msg global_db state msg =
|
||||||
|
|
||||||
|
let open Message in
|
||||||
|
let open Logging.Node.Worker in
|
||||||
|
|
||||||
|
lwt_debug "Read message from %a: %a"
|
||||||
|
P2p.Peer_id.pp_short state.gid Message.pp_json msg >>= fun () ->
|
||||||
|
|
||||||
|
match msg with
|
||||||
|
|
||||||
|
| Get_current_branch net_id ->
|
||||||
|
may_handle_global global_db net_id @@ fun net_db ->
|
||||||
|
if not (Net_id.Table.mem state.peer_active_nets net_id) then
|
||||||
|
ignore
|
||||||
|
@@ P2p.try_send global_db.p2p state.conn
|
||||||
|
@@ Get_current_branch net_id ;
|
||||||
|
net_db.callback.current_branch 200 >>= fun locator ->
|
||||||
|
ignore
|
||||||
|
@@ P2p.try_send global_db.p2p state.conn
|
||||||
|
@@ Current_branch (net_id, locator) ;
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
| Current_branch (net_id, locator) ->
|
||||||
|
may_activate global_db state net_id @@ fun net_db ->
|
||||||
|
net_db.callback.notify_branch state.gid locator ;
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
| Deactivate net_id ->
|
||||||
|
may_handle state net_id @@ fun net_db ->
|
||||||
|
deactivate state net_db ;
|
||||||
|
Net_id.Table.remove state.peer_active_nets net_id ;
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
| Get_current_head net_id ->
|
||||||
|
may_handle state net_id @@ fun net_db ->
|
||||||
|
net_db.callback.current_head 200 >>= fun (head, mempool) ->
|
||||||
|
ignore
|
||||||
|
@@ P2p.try_send global_db.p2p state.conn
|
||||||
|
@@ Current_head (net_id, head, mempool) ;
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
| Current_head (net_id, head, mempool) ->
|
||||||
|
may_handle state net_id @@ fun net_db ->
|
||||||
|
net_db.callback.notify_head state.gid head mempool ;
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
| Get_block_headers (net_id, hashes) ->
|
||||||
|
may_handle state net_id @@ fun net_db ->
|
||||||
|
(* Should we filter out invalid block ? *)
|
||||||
|
(* Should we filter out blocks whose validity is unknown ? *)
|
||||||
|
(* Should we blame request of unadvertised blocks ? *)
|
||||||
|
Lwt_list.iter_p
|
||||||
|
(fun hash ->
|
||||||
|
Raw_block_header.Table.read
|
||||||
|
net_db.block_header_db.table hash >|= function
|
||||||
|
| None -> ()
|
||||||
|
| Some p ->
|
||||||
|
ignore @@
|
||||||
|
P2p.try_send global_db.p2p state.conn (Block_header p))
|
||||||
|
hashes
|
||||||
|
|
||||||
|
| Block_header block ->
|
||||||
|
may_handle state block.shell.net_id @@ fun net_db ->
|
||||||
|
let hash = Store.Block_header.hash block in
|
||||||
|
Raw_block_header.Table.notify
|
||||||
|
net_db.block_header_db.table state.gid hash block >>= fun () ->
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
| Get_operations (net_id, hashes) ->
|
||||||
|
may_handle state net_id @@ fun net_db ->
|
||||||
|
Lwt_list.iter_p
|
||||||
|
(fun hash ->
|
||||||
|
Raw_operation.Table.read
|
||||||
|
net_db.operation_db.table hash >|= function
|
||||||
|
| None -> ()
|
||||||
|
| Some p ->
|
||||||
|
ignore @@
|
||||||
|
P2p.try_send global_db.p2p state.conn (Operation p))
|
||||||
|
hashes
|
||||||
|
|
||||||
|
| Operation operation ->
|
||||||
|
may_handle state operation.shell.net_id @@ fun net_db ->
|
||||||
|
let hash = Store.Operation.hash operation in
|
||||||
|
Raw_operation.Table.notify
|
||||||
|
net_db.operation_db.table state.gid hash operation >>= fun () ->
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
| Get_protocols hashes ->
|
||||||
|
Lwt_list.iter_p
|
||||||
|
(fun hash ->
|
||||||
|
Raw_protocol.Table.read
|
||||||
|
global_db.protocol_db.table hash >|= function
|
||||||
|
| None -> ()
|
||||||
|
| Some p ->
|
||||||
|
ignore @@
|
||||||
|
P2p.try_send global_db.p2p state.conn (Protocol p))
|
||||||
|
hashes
|
||||||
|
|
||||||
|
| Protocol protocol ->
|
||||||
|
let hash = Store.Protocol.hash protocol in
|
||||||
|
Raw_protocol.Table.notify
|
||||||
|
global_db.protocol_db.table state.gid hash protocol >>= fun () ->
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
let rec worker_loop global_db state =
|
||||||
|
Lwt_utils.protect ~canceler:state.canceler begin fun () ->
|
||||||
|
P2p.recv global_db.p2p state.conn
|
||||||
|
end >>= function
|
||||||
|
| Ok msg ->
|
||||||
|
handle_msg global_db state msg >>= fun () ->
|
||||||
|
worker_loop global_db state
|
||||||
|
| Error _ ->
|
||||||
|
Net_id.Table.iter
|
||||||
|
(fun _ -> deactivate state)
|
||||||
|
state.peer_active_nets ;
|
||||||
|
P2p.Peer_id.Table.remove global_db.p2p_readers state.gid ;
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
let run db gid conn =
|
||||||
|
let canceler = Lwt_utils.Canceler.create () in
|
||||||
|
let state = {
|
||||||
|
conn ; gid ; canceler ;
|
||||||
|
peer_active_nets = Net_id.Table.create 17 ;
|
||||||
|
worker = Lwt.return_unit ;
|
||||||
|
} in
|
||||||
|
Net_id.Table.iter (fun net_id _net_db ->
|
||||||
|
Lwt.async begin fun () ->
|
||||||
|
P2p.send db.p2p conn (Get_current_branch net_id)
|
||||||
|
end)
|
||||||
|
db.active_nets ;
|
||||||
|
state.worker <-
|
||||||
|
Lwt_utils.worker "db_network_reader"
|
||||||
|
~run:(fun () -> worker_loop db state)
|
||||||
|
~cancel:(fun () -> Lwt_utils.Canceler.cancel canceler) ;
|
||||||
|
P2p.Peer_id.Table.add db.p2p_readers gid state
|
||||||
|
|
||||||
|
let shutdown s =
|
||||||
|
Lwt_utils.Canceler.cancel s.canceler >>= fun () ->
|
||||||
|
s.worker
|
||||||
|
|
||||||
|
end
|
||||||
|
|
||||||
|
let active_peer_ids p2p () =
|
||||||
|
List.fold_left
|
||||||
|
(fun acc conn ->
|
||||||
|
let { P2p.Connection_info.peer_id } = P2p.connection_info p2p conn in
|
||||||
|
P2p.Peer_id.Set.add peer_id acc)
|
||||||
|
P2p.Peer_id.Set.empty
|
||||||
|
(P2p.connections p2p)
|
||||||
|
|
||||||
|
let raw_try_send p2p peer_id msg =
|
||||||
|
match P2p.find_connection p2p peer_id with
|
||||||
|
| None -> ()
|
||||||
|
| Some conn -> ignore (P2p.try_send p2p conn msg : bool)
|
||||||
|
|
||||||
|
let create disk p2p =
|
||||||
|
let global_request =
|
||||||
|
{ data = () ;
|
||||||
|
active = active_peer_ids p2p ;
|
||||||
|
send = raw_try_send p2p ;
|
||||||
|
} in
|
||||||
|
let protocol_db = Raw_protocol.create global_request disk in
|
||||||
|
let active_nets = Net_id.Table.create 17 in
|
||||||
|
let p2p_readers = P2p.Peer_id.Table.create 17 in
|
||||||
|
let block_input = Watcher.create_input () in
|
||||||
|
let operation_input = Watcher.create_input () in
|
||||||
|
let db =
|
||||||
|
{ p2p ; p2p_readers ; disk ;
|
||||||
|
active_nets ; protocol_db ;
|
||||||
|
block_input ; operation_input } in
|
||||||
|
P2p.on_new_connection p2p (P2p_reader.run db) ;
|
||||||
|
P2p.iter_connections p2p (P2p_reader.run db) ;
|
||||||
|
db
|
||||||
|
|
||||||
|
let activate ~callback ({ p2p ; active_nets } as global_db) net =
|
||||||
|
let net_id = State.Net.id net in
|
||||||
|
match Net_id.Table.find active_nets net_id with
|
||||||
|
| exception Not_found ->
|
||||||
|
let active_peers = ref P2p.Peer_id.Set.empty in
|
||||||
|
let p2p_request =
|
||||||
|
let net_id = State.Net.id net in
|
||||||
|
{ data = net_id ;
|
||||||
|
active = (fun () -> !active_peers) ;
|
||||||
|
send = raw_try_send p2p ;
|
||||||
|
} in
|
||||||
|
let operation_db =
|
||||||
|
Raw_operation.create
|
||||||
|
~global_input:global_db.operation_input p2p_request net in
|
||||||
|
let block_header_db =
|
||||||
|
Raw_block_header.create
|
||||||
|
~global_input:global_db.block_input p2p_request net in
|
||||||
|
let net = {
|
||||||
|
global_db ; operation_db ; block_header_db ;
|
||||||
|
net ; callback ; active_peers ;
|
||||||
|
active_connections = P2p.Peer_id.Table.create 53 ;
|
||||||
|
} in
|
||||||
|
P2p.iter_connections p2p (fun _peer_id conn ->
|
||||||
|
Lwt.async begin fun () ->
|
||||||
|
P2p.send p2p conn (Get_current_branch net_id)
|
||||||
|
end) ;
|
||||||
|
Net_id.Table.add active_nets net_id net ;
|
||||||
|
net
|
||||||
|
| net ->
|
||||||
|
net
|
||||||
|
|
||||||
|
let deactivate net =
|
||||||
|
let { active_nets ; p2p } = net.global_db in
|
||||||
|
let net_id = State.Net.id net.net in
|
||||||
|
Net_id.Table.remove active_nets net_id ;
|
||||||
|
P2p.Peer_id.Table.iter
|
||||||
|
(fun _peer_id reader ->
|
||||||
|
P2p_reader.deactivate reader net ;
|
||||||
|
Lwt.async begin fun () ->
|
||||||
|
P2p.send p2p reader.conn (Deactivate net_id)
|
||||||
|
end)
|
||||||
|
net.active_connections ;
|
||||||
|
Raw_operation.shutdown net.operation_db >>= fun () ->
|
||||||
|
Raw_block_header.shutdown net.block_header_db >>= fun () ->
|
||||||
|
Lwt.return_unit >>= fun () ->
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
let get_net { active_nets } net_id =
|
||||||
|
try Some (Net_id.Table.find active_nets net_id)
|
||||||
|
with Not_found -> None
|
||||||
|
|
||||||
|
let shutdown { p2p ; p2p_readers ; active_nets } =
|
||||||
|
P2p.Peer_id.Table.fold
|
||||||
|
(fun _peer_id reader acc ->
|
||||||
|
P2p_reader.shutdown reader >>= fun () -> acc)
|
||||||
|
p2p_readers
|
||||||
|
Lwt.return_unit >>= fun () ->
|
||||||
|
Net_id.Table.fold
|
||||||
|
(fun _ net_db acc ->
|
||||||
|
Raw_operation.shutdown net_db.operation_db >>= fun () ->
|
||||||
|
Raw_block_header.shutdown net_db.block_header_db >>= fun () ->
|
||||||
|
acc)
|
||||||
|
active_nets
|
||||||
|
Lwt.return_unit >>= fun () ->
|
||||||
|
P2p.shutdown p2p >>= fun () ->
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
module type DISTRIBUTED_DB = Distributed_db_functors.DISTRIBUTED_DB
|
||||||
|
|
||||||
|
module Make
|
||||||
|
(Table : DISTRIBUTED_DB)
|
||||||
|
(Kind : sig
|
||||||
|
type t
|
||||||
|
val proj: t -> Table.t
|
||||||
|
end) = struct
|
||||||
|
type t = Kind.t
|
||||||
|
type key = Table.key
|
||||||
|
type value = Table.value
|
||||||
|
let known t k = Table.known (Kind.proj t) k
|
||||||
|
let read t k = Table.read (Kind.proj t) k
|
||||||
|
let read_exn t k = Table.read_exn (Kind.proj t) k
|
||||||
|
let prefetch t ?peer k = Table.prefetch (Kind.proj t) ?peer k
|
||||||
|
let fetch t ?peer k = Table.fetch (Kind.proj t) ?peer k
|
||||||
|
let commit t k = Table.commit (Kind.proj t) k
|
||||||
|
let inject t k v = Table.inject (Kind.proj t) k v
|
||||||
|
let watch t = Table.watch (Kind.proj t)
|
||||||
|
end
|
||||||
|
|
||||||
|
module Operation =
|
||||||
|
Make (Raw_operation.Table) (struct
|
||||||
|
type t = net
|
||||||
|
let proj net = net.operation_db.table
|
||||||
|
end)
|
||||||
|
|
||||||
|
module Block_header =
|
||||||
|
Make (Raw_block_header.Table) (struct
|
||||||
|
type t = net
|
||||||
|
let proj net = net.block_header_db.table
|
||||||
|
end)
|
||||||
|
|
||||||
|
module Protocol =
|
||||||
|
Make (Raw_protocol.Table) (struct
|
||||||
|
type t = db
|
||||||
|
let proj db = db.protocol_db.table
|
||||||
|
end)
|
||||||
|
|
||||||
|
let inject_block t bytes =
|
||||||
|
let hash = Block_hash.hash_bytes [bytes] in
|
||||||
|
match
|
||||||
|
Data_encoding.Binary.of_bytes Store.Block_header.encoding bytes
|
||||||
|
with
|
||||||
|
| None ->
|
||||||
|
failwith "Cannot parse block header."
|
||||||
|
| Some block ->
|
||||||
|
match get_net t block.shell.net_id with
|
||||||
|
| None ->
|
||||||
|
failwith "Unknown network."
|
||||||
|
| Some net_db ->
|
||||||
|
Block_header.known net_db hash >>= function
|
||||||
|
| true ->
|
||||||
|
failwith "Previously injected block."
|
||||||
|
| false ->
|
||||||
|
Raw_block_header.Table.inject
|
||||||
|
net_db.block_header_db.table hash block >>= function
|
||||||
|
| false ->
|
||||||
|
failwith "Previously injected block."
|
||||||
|
| true ->
|
||||||
|
return (hash, block)
|
||||||
|
|
||||||
|
let broadcast_head net head mempool =
|
||||||
|
let msg : Message.t =
|
||||||
|
Current_head (State.Net.id net.net, head, mempool) in
|
||||||
|
P2p.Peer_id.Table.iter
|
||||||
|
(fun _peer_id state ->
|
||||||
|
ignore (P2p.try_send net.global_db.p2p state.conn msg))
|
||||||
|
net.active_connections
|
||||||
|
|
||||||
|
let read_block { active_nets } hash =
|
||||||
|
Net_id.Table.fold
|
||||||
|
(fun _net_id net acc ->
|
||||||
|
acc >>= function
|
||||||
|
| Some _ -> acc
|
||||||
|
| None ->
|
||||||
|
Block_header.read net hash >>= function
|
||||||
|
| None -> acc
|
||||||
|
| Some block -> Lwt.return (Some (net, block)))
|
||||||
|
active_nets
|
||||||
|
Lwt.return_none
|
||||||
|
|
||||||
|
let read_block_exn t hash =
|
||||||
|
read_block t hash >>= function
|
||||||
|
| None -> Lwt.fail Not_found
|
||||||
|
| Some b -> Lwt.return b
|
||||||
|
|
||||||
|
let read_operation { active_nets } hash =
|
||||||
|
Net_id.Table.fold
|
||||||
|
(fun _net_id net acc ->
|
||||||
|
acc >>= function
|
||||||
|
| Some _ -> acc
|
||||||
|
| None ->
|
||||||
|
Operation.read net hash >>= function
|
||||||
|
| None -> acc
|
||||||
|
| Some block -> Lwt.return (Some (net, block)))
|
||||||
|
active_nets
|
||||||
|
Lwt.return_none
|
||||||
|
|
||||||
|
let read_operation_exn t hash =
|
||||||
|
read_operation t hash >>= function
|
||||||
|
| None -> Lwt.fail Not_found
|
||||||
|
| Some b -> Lwt.return b
|
||||||
|
|
||||||
|
let watch_block { block_input } =
|
||||||
|
Watcher.create_stream block_input
|
||||||
|
let watch_operation { operation_input } =
|
||||||
|
Watcher.create_stream operation_input
|
||||||
|
let watch_protocol { protocol_db } =
|
||||||
|
Raw_protocol.Table.watch protocol_db.table
|
||||||
|
|
||||||
|
module Raw = struct
|
||||||
|
type 'a t =
|
||||||
|
| Bootstrap
|
||||||
|
| Advertise of P2p_types.Point.t list
|
||||||
|
| Message of 'a
|
||||||
|
| Disconnect
|
||||||
|
let encoding = P2p.Raw.encoding Message.cfg.encoding
|
||||||
|
let supported_versions = Message.cfg.versions
|
||||||
|
end
|
91
src/node/shell/distributed_db.mli
Normal file
91
src/node/shell/distributed_db.mli
Normal file
@ -0,0 +1,91 @@

(**************************************************************************)
(*                                                                        *)
(*    Copyright (c) 2014 - 2016.                                          *)
(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
(*                                                                        *)
(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
(*                                                                        *)
(**************************************************************************)

type t
type db = t

module Message = Distributed_db_message
module Metadata = Distributed_db_metadata

type p2p = (Message.t, Metadata.t) P2p.net

val create: State.t -> p2p -> t
val shutdown: t -> unit Lwt.t

type net

val state: net -> State.Net.t

type callback = {
  notify_branch: P2p.Peer_id.t -> Block_hash.t list -> unit ;
  current_branch: int -> Block_hash.t list Lwt.t ;
  notify_head: P2p.Peer_id.t -> Block_hash.t -> Operation_hash.t list -> unit ;
  current_head: int -> (Block_hash.t * Operation_hash.t list) Lwt.t ;
  disconnection: P2p.Peer_id.t -> unit ;
}

val activate: callback:callback -> t -> State.Net.t -> net
val deactivate: net -> unit Lwt.t

module type DISTRIBUTED_DB = sig
  type t
  type key
  type value
  val known: t -> key -> bool Lwt.t
  val read: t -> key -> value option Lwt.t
  val read_exn: t -> key -> value Lwt.t
  val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> unit
  val fetch: t -> ?peer:P2p.Peer_id.t -> key -> value Lwt.t
  val commit: t -> key -> unit Lwt.t
  val inject: t -> key -> value -> bool Lwt.t
  val watch: t -> (key * value) Lwt_stream.t * Watcher.stopper
end

module Operation :
  DISTRIBUTED_DB with type t = net
                  and type key := Operation_hash.t
                  and type value := Store.Operation.t

module Block_header :
  DISTRIBUTED_DB with type t = net
                  and type key := Block_hash.t
                  and type value := Store.Block_header.t

module Protocol :
  DISTRIBUTED_DB with type t = db
                  and type key := Protocol_hash.t
                  and type value := Tezos_compiler.Protocol.t

val broadcast_head:
  net -> Block_hash.t -> Operation_hash.t list -> unit

val inject_block:
  t -> MBytes.t -> (Block_hash.t * Store.Block_header.t) tzresult Lwt.t

val read_block:
  t -> Block_hash.t -> (net * Store.Block_header.t) option Lwt.t
val read_block_exn:
  t -> Block_hash.t -> (net * Store.Block_header.t) Lwt.t

val read_operation:
  t -> Operation_hash.t -> (net * Store.Operation.t) option Lwt.t
val read_operation_exn:
  t -> Operation_hash.t -> (net * Store.Operation.t) Lwt.t

val watch_block:
  t -> (Block_hash.t * Store.Block_header.t) Lwt_stream.t * Watcher.stopper
val watch_operation:
  t -> (Operation_hash.t * Store.Operation.t) Lwt_stream.t * Watcher.stopper
val watch_protocol:
  t -> (Protocol_hash.t * Store.Protocol.t) Lwt_stream.t * Watcher.stopper

module Raw : sig
  val encoding: Message.t P2p.Raw.t Data_encoding.t
  val supported_versions: P2p_types.Version.t list
end
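To make the intended call sequence of this interface concrete, here is a hedged sketch (not part of the patch): create the global DB once, activate one network, then use the Operation view to pull a value into the in-memory overlay with [fetch] and flush it to disk with [commit]. The surrounding function names, the [callback] value, and the elided validation step are assumptions, and the usual Lwt infix operators are assumed to be in scope.

(* Sketch: wire the distributed DB into a node and use the Operation view. *)
let setup state p2p net callback =
  let db = Distributed_db.create state p2p in
  Distributed_db.activate ~callback db net

let fetch_and_store net_db peer oph =
  Distributed_db.Operation.fetch net_db ~peer oph >>= fun _op ->
  (* validation of the fetched operation would happen here *)
  Distributed_db.Operation.commit net_db oph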
311
src/node/shell/distributed_db_functors.ml
Normal file
311
src/node/shell/distributed_db_functors.ml
Normal file
@ -0,0 +1,311 @@
|
|||||||
|
(**************************************************************************)
|
||||||
|
(* *)
|
||||||
|
(* Copyright (c) 2014 - 2016. *)
|
||||||
|
(* Dynamic Ledger Solutions, Inc. <contact@tezos.com> *)
|
||||||
|
(* *)
|
||||||
|
(* All rights reserved. No warranty, explicit or implicit, provided. *)
|
||||||
|
(* *)
|
||||||
|
(**************************************************************************)
|
||||||
|
|
||||||
|
module type DISTRIBUTED_DB = sig
|
||||||
|
type t
|
||||||
|
type key
|
||||||
|
type value
|
||||||
|
val known: t -> key -> bool Lwt.t
|
||||||
|
val read: t -> key -> value option Lwt.t
|
||||||
|
val read_exn: t -> key -> value Lwt.t
|
||||||
|
val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> unit
|
||||||
|
val fetch: t -> ?peer:P2p.Peer_id.t -> key -> value Lwt.t
|
||||||
|
val commit: t -> key -> unit Lwt.t
|
||||||
|
(* val commit_invalid: t -> key -> unit Lwt.t *) (* TODO *)
|
||||||
|
val inject: t -> key -> value -> bool Lwt.t
|
||||||
|
val watch: t -> (key * value) Lwt_stream.t * Watcher.stopper
|
||||||
|
end
|
||||||
|
|
||||||
|
module type SCHEDULER_EVENTS = sig
|
||||||
|
type t
|
||||||
|
type key
|
||||||
|
val request: t -> P2p.Peer_id.t option -> key -> unit
|
||||||
|
val notify: t -> P2p.Peer_id.t -> key -> unit
|
||||||
|
val notify_unrequested: t -> P2p.Peer_id.t -> key -> unit
|
||||||
|
val notify_duplicate: t -> P2p.Peer_id.t -> key -> unit
|
||||||
|
end
|
||||||
|
|
||||||
|
module Make_table
|
||||||
|
(Hash : HASH)
|
||||||
|
(Disk_table : State.DATA_STORE with type key := Hash.t)
|
||||||
|
(Memory_table : Hashtbl.S with type key := Hash.t)
|
||||||
|
(Scheduler : SCHEDULER_EVENTS with type key := Hash.t) : sig
|
||||||
|
|
||||||
|
include DISTRIBUTED_DB with type key = Hash.t
|
||||||
|
and type value = Disk_table.value
|
||||||
|
val create:
|
||||||
|
?global_input:(key * value) Watcher.input ->
|
||||||
|
Scheduler.t -> Disk_table.store -> t
|
||||||
|
val notify: t -> P2p.Peer_id.t -> key -> value -> unit Lwt.t
|
||||||
|
|
||||||
|
end = struct
|
||||||
|
|
||||||
|
type key = Hash.t
|
||||||
|
type value = Disk_table.value
|
||||||
|
|
||||||
|
type t = {
|
||||||
|
scheduler: Scheduler.t ;
|
||||||
|
disk: Disk_table.store ;
|
||||||
|
memory: status Memory_table.t ;
|
||||||
|
global_input: (key * value) Watcher.input option ;
|
||||||
|
input: (key * value) Watcher.input ;
|
||||||
|
}
|
||||||
|
|
||||||
|
and status =
|
||||||
|
| Pending of value Lwt.u
|
||||||
|
| Found of value
|
||||||
|
|
||||||
|
let known s k =
|
||||||
|
match Memory_table.find s.memory k with
|
||||||
|
| exception Not_found -> Disk_table.known s.disk k
|
||||||
|
| Pending _ -> Lwt.return_false
|
||||||
|
| Found _ -> Lwt.return_true
|
||||||
|
|
||||||
|
let read s k =
|
||||||
|
match Memory_table.find s.memory k with
|
||||||
|
| exception Not_found -> Disk_table.read_opt s.disk k
|
||||||
|
| Found v -> Lwt.return (Some v)
|
||||||
|
| Pending _ -> Lwt.return_none
|
||||||
|
|
||||||
|
let read_exn s k =
|
||||||
|
match Memory_table.find s.memory k with
|
||||||
|
| exception Not_found -> Disk_table.read_exn s.disk k
|
||||||
|
| Found v -> Lwt.return v
|
||||||
|
| Pending _ -> Lwt.fail Not_found
|
||||||
|
|
||||||
|
let fetch s ?peer k =
|
||||||
|
match Memory_table.find s.memory k with
|
||||||
|
| exception Not_found -> begin
|
||||||
|
Disk_table.read_opt s.disk k >>= function
|
||||||
|
| None ->
|
||||||
|
let waiter, wakener = Lwt.wait () in
|
||||||
|
Memory_table.add s.memory k (Pending wakener) ;
|
||||||
|
Scheduler.request s.scheduler peer k ;
|
||||||
|
waiter
|
||||||
|
| Some v -> Lwt.return v
|
||||||
|
end
|
||||||
|
| Pending w -> Lwt.waiter_of_wakener w
|
||||||
|
| Found v -> Lwt.return v
|
||||||
|
|
||||||
|
let prefetch s ?peer k = Lwt.ignore_result (fetch s ?peer k)
|
||||||
|
|
||||||
|
let notify s p k v =
|
||||||
|
Scheduler.notify s.scheduler p k ;
|
||||||
|
match Memory_table.find s.memory k with
|
||||||
|
| exception Not_found -> begin
|
||||||
|
Disk_table.known s.disk k >>= function
|
||||||
|
| true ->
|
||||||
|
Scheduler.notify_duplicate s.scheduler p k ;
|
||||||
|
Lwt.return_unit
|
||||||
|
| false ->
|
||||||
|
Scheduler.notify_unrequested s.scheduler p k ;
|
||||||
|
Lwt.return_unit
|
||||||
|
end
|
||||||
|
| Pending w ->
|
||||||
|
Memory_table.replace s.memory k (Found v) ;
|
||||||
|
Lwt.wakeup w v ;
|
||||||
|
iter_option s.global_input
|
||||||
|
~f:(fun input -> Watcher.notify input (k, v)) ;
|
||||||
|
Watcher.notify s.input (k, v) ;
|
||||||
|
Lwt.return_unit
|
||||||
|
| Found _ ->
|
||||||
|
Scheduler.notify_duplicate s.scheduler p k ;
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
let inject s k v =
|
||||||
|
match Memory_table.find s.memory k with
|
||||||
|
| exception Not_found -> begin
|
||||||
|
Disk_table.known s.disk k >>= function
|
||||||
|
| true ->
|
||||||
|
Lwt.return_false
|
||||||
|
| false ->
|
||||||
|
Memory_table.add s.memory k (Found v) ;
|
||||||
|
Lwt.return_true
|
||||||
|
end
|
||||||
|
| Pending _
|
||||||
|
| Found _ ->
|
||||||
|
Lwt.return_false
|
||||||
|
|
||||||
|
let commit s k =
|
||||||
|
match Memory_table.find s.memory k with
|
||||||
|
| exception Not_found -> Lwt.return_unit (* TODO error ?? *)
|
||||||
|
| Pending _ -> Lwt.return_unit (* TODO error ?? *)
|
||||||
|
| Found v ->
|
||||||
|
Disk_table.store s.disk v >>= fun _ ->
|
||||||
|
Memory_table.remove s.memory k ;
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
let watch s = Watcher.create_stream s.input
|
||||||
|
|
||||||
|
let create ?global_input scheduler disk =
|
||||||
|
let memory = Memory_table.create 17 in
|
||||||
|
let input = Watcher.create_input () in
|
||||||
|
{ scheduler ; disk ; memory ; input ; global_input }
|
||||||
|
|
||||||
|
end
|
||||||
|
|
||||||
|
module type REQUEST = sig
|
||||||
|
type key
|
||||||
|
type param
|
||||||
|
val active : param -> P2p.Peer_id.Set.t
|
||||||
|
val send : param -> P2p.Peer_id.t -> key list -> unit
|
||||||
|
end
|
||||||
|
|
||||||
|
module Make_request_scheduler
|
||||||
|
(Hash : HASH)
|
||||||
|
(Table : Hashtbl.S with type key := Hash.t)
|
||||||
|
(Request : REQUEST with type key := Hash.t) : sig
|
||||||
|
|
||||||
|
type t
|
||||||
|
val create: Request.param -> t
|
||||||
|
val shutdown: t -> unit Lwt.t
|
||||||
|
include SCHEDULER_EVENTS with type t := t and type key := Hash.t
|
||||||
|
|
||||||
|
end = struct
|
||||||
|
|
||||||
|
type key = Hash.t
|
||||||
|
type param = Request.param
|
||||||
|
|
||||||
|
type t = {
|
||||||
|
push_to_worker: event -> unit ;
|
||||||
|
cancel_worker: unit -> unit Lwt.t ;
|
||||||
|
worker: unit Lwt.t ;
|
||||||
|
}
|
||||||
|
|
||||||
|
and event =
|
||||||
|
| Request of P2p.Peer_id.t option * key
|
||||||
|
| Notify of P2p.Peer_id.t * key
|
||||||
|
| Notify_duplicate of P2p.Peer_id.t * key
|
||||||
|
| Notify_unrequested of P2p.Peer_id.t * key
|
||||||
|
|
||||||
|
let request t p k =
|
||||||
|
t.push_to_worker (Request (p, k))
|
||||||
|
let notify t p k =
|
||||||
|
t.push_to_worker (Notify (p, k))
|
||||||
|
let notify_duplicate t p k =
|
||||||
|
t.push_to_worker (Notify_duplicate (p, k))
|
||||||
|
let notify_unrequested t p k =
|
||||||
|
t.push_to_worker (Notify_unrequested (p, k))
|
||||||
|
|
||||||
|
type worker_state = {
|
||||||
|
param: Request.param ;
|
||||||
|
pending: status Table.t ;
|
||||||
|
cancelation: unit -> unit Lwt.t ;
|
||||||
|
wait_events: unit -> event list Lwt.t ;
|
||||||
|
}
|
||||||
|
|
||||||
|
and status = {
|
||||||
|
peers: P2p.Peer_id.Set.t ;
|
||||||
|
next_request: float ;
|
||||||
|
delay: float ;
|
||||||
|
}
|
||||||
|
|
||||||
|
let compute_timeout state =
|
||||||
|
let next =
|
||||||
|
Table.fold
|
||||||
|
(fun _ { next_request } acc -> min next_request acc)
|
||||||
|
state.pending infinity in
|
||||||
|
let now = Unix.gettimeofday () in
|
||||||
|
let delay = next -. now in
|
||||||
|
if delay <= 0. then Lwt.return_unit else Lwt_unix.sleep delay
|
||||||
|
|
||||||
|
let process_event state = function
|
||||||
|
| Request (peer, key) -> begin
|
||||||
|
try
|
||||||
|
let data = Table.find state.pending key in
|
||||||
|
let peers =
|
||||||
|
match peer with
|
||||||
|
| None -> data.peers
|
||||||
|
| Some peer -> P2p.Peer_id.Set.add peer data.peers in
|
||||||
|
Table.replace state.pending key { data with peers } ;
|
||||||
|
Lwt.return_unit
|
||||||
|
with Not_found ->
|
||||||
|
let peers =
|
||||||
|
match peer with
|
||||||
|
| None -> P2p.Peer_id.Set.empty
|
||||||
|
| Some peer -> P2p.Peer_id.Set.singleton peer in
|
||||||
|
Table.add state.pending key {
|
||||||
|
peers ;
|
||||||
|
next_request = Unix.gettimeofday () ;
|
||||||
|
delay = 1.0 ;
|
||||||
|
} ;
|
||||||
|
Lwt.return_unit
|
||||||
|
end
|
||||||
|
| Notify (_gid, key) ->
|
||||||
|
Table.remove state.pending key ;
|
||||||
|
Lwt.return_unit
|
||||||
|
| Notify_unrequested _
|
||||||
|
| Notify_duplicate _ ->
|
||||||
|
(* TODO *)
|
||||||
|
Lwt.return_unit
|
||||||
|
|
||||||
|
let worker_loop state =
|
||||||
|
let process = process_event state in
|
||||||
|
let rec loop () =
|
||||||
|
let shutdown = state.cancelation () >|= fun () -> `Shutdown
|
||||||
|
and timeout = compute_timeout state >|= fun () -> `Timeout
|
||||||
|
and events = state.wait_events () >|= fun events -> `Events events in
|
||||||
|
Lwt.pick [ timeout ; events ; shutdown ] >>= function
|
||||||
|
| `Shutdown -> Lwt.return_unit
|
||||||
|
| `Events events ->
|
||||||
|
Lwt_list.iter_s process events >>= fun () ->
|
||||||
|
loop ()
|
||||||
|
| `Timeout ->
|
||||||
|
let now = Unix.gettimeofday () in
|
||||||
|
let active_peers = Request.active state.param in
|
||||||
|
let requests =
|
||||||
|
Table.fold
|
||||||
|
(fun key { peers ; next_request ; delay } acc ->
|
||||||
|
if next_request > now +. 0.2 then
|
||||||
|
acc
|
||||||
|
else
|
||||||
|
let still_peers = P2p.Peer_id.Set.inter peers active_peers in
|
||||||
|
if P2p.Peer_id.Set.is_empty still_peers &&
|
||||||
|
not (P2p.Peer_id.Set.is_empty peers) then
|
||||||
|
( Table.remove state.pending key ; acc )
|
||||||
|
else
|
||||||
|
let requested_peers =
|
||||||
|
if P2p.Peer_id.Set.is_empty peers
|
||||||
|
then active_peers
|
||||||
|
else peers in
|
||||||
|
let next = { peers = still_peers ;
|
||||||
|
next_request = now +. delay ;
|
||||||
|
delay = delay *. 1.2 } in
|
||||||
|
Table.replace state.pending key next ;
|
||||||
|
P2p.Peer_id.Set.fold
|
||||||
|
(fun gid acc ->
|
||||||
|
let requests =
|
||||||
|
try key :: P2p_types.Peer_id.Map.find gid acc
|
||||||
|
with Not_found -> [key] in
|
||||||
|
P2p_types.Peer_id.Map.add gid requests acc)
|
||||||
|
requested_peers
|
||||||
|
acc)
|
||||||
|
state.pending P2p_types.Peer_id.Map.empty in
|
||||||
|
P2p_types.Peer_id.Map.iter (Request.send state.param) requests ;
|
||||||
|
loop ()
|
||||||
|
in
|
||||||
|
loop
|
||||||
|
|
||||||
|
let create param =
|
||||||
|
let cancelation, cancel_worker, _ = Lwt_utils.canceler () in
|
||||||
|
let push_to_worker, wait_events = Lwt_utils.queue () in
|
||||||
|
let pending = Table.create 17 in
|
||||||
|
let worker_state =
|
||||||
|
{ cancelation ; wait_events ; pending ; param } in
|
||||||
|
let worker =
|
||||||
|
Lwt_utils.worker "db_request_scheduler"
|
||||||
|
~run:(worker_loop worker_state)
|
||||||
|
~cancel:cancel_worker in
|
||||||
|
{ cancel_worker ; push_to_worker ; worker }
|
||||||
|
|
||||||
|
let shutdown s =
|
||||||
|
s.cancel_worker () >>= fun () ->
|
||||||
|
s.worker
|
||||||
|
|
||||||
|
end
|
66
src/node/shell/distributed_db_functors.mli
Normal file
66
src/node/shell/distributed_db_functors.mli
Normal file
@ -0,0 +1,66 @@

(**************************************************************************)
(*                                                                        *)
(*    Copyright (c) 2014 - 2016.                                          *)
(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
(*                                                                        *)
(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
(*                                                                        *)
(**************************************************************************)

module type DISTRIBUTED_DB = sig
  type t
  type key
  type value
  val known: t -> key -> bool Lwt.t
  val read: t -> key -> value option Lwt.t
  val read_exn: t -> key -> value Lwt.t
  val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> unit
  val fetch: t -> ?peer:P2p.Peer_id.t -> key -> value Lwt.t
  val commit: t -> key -> unit Lwt.t
  (* val commit_invalid: t -> key -> unit Lwt.t *) (* TODO *)
  val inject: t -> key -> value -> bool Lwt.t
  val watch: t -> (key * value) Lwt_stream.t * Watcher.stopper
end

module type SCHEDULER_EVENTS = sig
  type t
  type key
  val request: t -> P2p.Peer_id.t option -> key -> unit
  val notify: t -> P2p.Peer_id.t -> key -> unit
  val notify_unrequested: t -> P2p.Peer_id.t -> key -> unit
  val notify_duplicate: t -> P2p.Peer_id.t -> key -> unit
end

module Make_table
    (Hash : HASH)
    (Disk_table : State.DATA_STORE with type key := Hash.t)
    (Memory_table : Hashtbl.S with type key := Hash.t)
    (Scheduler : SCHEDULER_EVENTS with type key := Hash.t) : sig

  include DISTRIBUTED_DB with type key = Hash.t
                          and type value = Disk_table.value
  val create:
    ?global_input:(key * value) Watcher.input ->
    Scheduler.t -> Disk_table.store -> t
  val notify: t -> P2p.Peer_id.t -> key -> value -> unit Lwt.t

end

module type REQUEST = sig
  type key
  type param
  val active : param -> P2p.Peer_id.Set.t
  val send : param -> P2p.Peer_id.t -> key list -> unit
end

module Make_request_scheduler
    (Hash : HASH)
    (Table : Hashtbl.S with type key := Hash.t)
    (Request : REQUEST with type key := Hash.t) : sig

  type t
  val create: Request.param -> t
  val shutdown: t -> unit Lwt.t
  include SCHEDULER_EVENTS with type t := t and type key := Hash.t

end
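The core idea behind Make_table is the in-memory overlay: a key is either Pending (callers wait on an Lwt wakener while the scheduler requests it from peers) or Found (received but not yet flushed to disk). A minimal self-contained sketch of that pattern, with illustrative names and without the disk table and scheduler parts, could look like this:

(* Illustrative sketch of the Pending/Found overlay used by Make_table. *)
type 'v status = Pending of 'v Lwt.u | Found of 'v

let fetch (table : (string, 'v status) Hashtbl.t) k =
  match Hashtbl.find table k with
  | Found v -> Lwt.return v
  | Pending u -> Lwt.waiter_of_wakener u
  | exception Not_found ->
      let waiter, wakener = Lwt.wait () in
      Hashtbl.add table k (Pending wakener) ;
      (* the real Make_table also consults the disk table first and
         asks the request scheduler to query peers here *)
      waiter

let notify table k v =
  match Hashtbl.find table k with
  | Pending u -> Hashtbl.replace table k (Found v) ; Lwt.wakeup u v
  | Found _ -> ()              (* duplicate *)
  | exception Not_found -> ()  (* unrequested *)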
137
src/node/shell/distributed_db_message.ml
Normal file
137
src/node/shell/distributed_db_message.ml
Normal file
@ -0,0 +1,137 @@
|
|||||||
|
(**************************************************************************)
|
||||||
|
(* *)
|
||||||
|
(* Copyright (c) 2014 - 2016. *)
|
||||||
|
(* Dynamic Ledger Solutions, Inc. <contact@tezos.com> *)
|
||||||
|
(* *)
|
||||||
|
(* All rights reserved. No warranty, explicit or implicit, provided. *)
|
||||||
|
(* *)
|
||||||
|
(**************************************************************************)
|
||||||
|
|
||||||
|
module Net_id = Store.Net_id
|
||||||
|
|
||||||
|
type t =
|
||||||
|
|
||||||
|
| Get_current_branch of Net_id.t
|
||||||
|
| Current_branch of Net_id.t * Block_hash.t list (* Block locator *)
|
||||||
|
| Deactivate of Net_id.t
|
||||||
|
|
||||||
|
| Get_current_head of Net_id.t
|
||||||
|
| Current_head of Net_id.t * Block_hash.t * Operation_hash.t list
|
||||||
|
|
||||||
|
| Get_block_headers of Net_id.t * Block_hash.t list
|
||||||
|
| Block_header of Store.Block_header.t
|
||||||
|
|
||||||
|
| Get_operations of Net_id.t * Operation_hash.t list
|
||||||
|
| Operation of Store.Operation.t
|
||||||
|
|
||||||
|
| Get_protocols of Protocol_hash.t list
|
||||||
|
| Protocol of Tezos_compiler.Protocol.t
|
||||||
|
|
||||||
|
let encoding =
|
||||||
|
let open Data_encoding in
|
||||||
|
let case ?max_length ~tag encoding unwrap wrap =
|
||||||
|
P2p.Encoding { tag; encoding; wrap; unwrap; max_length } in
|
||||||
|
[
|
||||||
|
case ~tag:0x10
|
||||||
|
(obj1
|
||||||
|
(req "get_current_branch" Net_id.encoding))
|
||||||
|
(function
|
||||||
|
| Get_current_branch net_id -> Some net_id
|
||||||
|
| _ -> None)
|
||||||
|
(fun net_id -> Get_current_branch net_id) ;
|
||||||
|
|
||||||
|
case ~tag:0x11
|
||||||
|
(obj2
|
||||||
|
(req "net_id" Net_id.encoding)
|
||||||
|
(req "current_branch" (list Block_hash.encoding)))
|
||||||
|
(function
|
||||||
|
| Current_branch (net_id, bhs) -> Some (net_id, bhs)
|
||||||
|
| _ -> None)
|
||||||
|
(fun (net_id, bhs) -> Current_branch (net_id, bhs)) ;
|
||||||
|
|
||||||
|
case ~tag:0x12
|
||||||
|
(obj1
|
||||||
|
(req "deactivate" Net_id.encoding))
|
||||||
|
(function
|
||||||
|
| Deactivate net_id -> Some net_id
|
||||||
|
| _ -> None)
|
||||||
|
(fun net_id -> Deactivate net_id) ;
|
||||||
|
|
||||||
|
case ~tag:0x13
|
||||||
|
(obj1
|
||||||
|
(req "get_current_head" Net_id.encoding))
|
||||||
|
(function
|
||||||
|
| Get_current_head net_id -> Some net_id
|
||||||
|
| _ -> None)
|
||||||
|
(fun net_id -> Get_current_head net_id) ;
|
||||||
|
|
||||||
|
case ~tag:0x14
|
||||||
|
(obj3
|
||||||
|
(req "net_id" Net_id.encoding)
|
||||||
|
(req "current_head" Block_hash.encoding)
|
||||||
|
(req "current_mempool" (list Operation_hash.encoding)))
|
||||||
|
(function
|
||||||
|
| Current_head (net_id, bh, ops) -> Some (net_id, bh, ops)
|
||||||
|
| _ -> None)
|
||||||
|
(fun (net_id, bh, ops) -> Current_head (net_id, bh, ops)) ;
|
||||||
|
|
||||||
|
case ~tag:0x20
|
||||||
|
(obj2
|
||||||
|
(req "net_id" Net_id.encoding)
|
||||||
|
(req "get_block_headers" (list Block_hash.encoding)))
|
||||||
|
(function
|
||||||
|
| Get_block_headers (net_id, bhs) -> Some (net_id, bhs)
|
||||||
|
| _ -> None)
|
||||||
|
(fun (net_id, bhs) -> Get_block_headers (net_id, bhs)) ;
|
||||||
|
|
||||||
|
case ~tag:0x21
|
||||||
|
(obj1 (req "block_header" Store.Block_header.encoding))
|
||||||
|
(function
|
||||||
|
| Block_header bh -> Some bh
|
||||||
|
| _ -> None)
|
||||||
|
(fun bh -> Block_header bh) ;
|
||||||
|
|
||||||
|
case ~tag:0x30
|
||||||
|
(obj2
|
||||||
|
(req "net_id" Net_id.encoding)
|
||||||
|
(req "get_operations" (list Operation_hash.encoding)))
|
||||||
|
(function
|
||||||
|
| Get_operations (net_id, bhs) -> Some (net_id, bhs)
|
||||||
|
| _ -> None)
|
||||||
|
(fun (net_id, bhs) -> Get_operations (net_id, bhs)) ;
|
||||||
|
|
||||||
|
case ~tag:0x31
|
||||||
|
(obj1 (req "operation" Store.Operation.encoding))
|
||||||
|
(function Operation o -> Some o | _ -> None)
|
||||||
|
(fun o -> Operation o);
|
||||||
|
|
||||||
|
case ~tag:0x40
|
||||||
|
(obj1
|
||||||
|
(req "get_protocols" (list Protocol_hash.encoding)))
|
||||||
|
(function
|
||||||
|
| Get_protocols protos -> Some protos
|
||||||
|
| _ -> None)
|
||||||
|
(fun protos -> Get_protocols protos);
|
||||||
|
|
||||||
|
case ~tag:0x41
|
||||||
|
(obj1 (req "protocol" Store.Protocol.encoding))
|
||||||
|
(function Protocol proto -> Some proto | _ -> None)
|
||||||
|
(fun proto -> Protocol proto);
|
||||||
|
|
||||||
|
]
|
||||||
|
|
||||||
|
let versions =
|
||||||
|
let open P2p.Version in
|
||||||
|
[ { name = "TEZOS" ;
|
||||||
|
major = 0 ;
|
||||||
|
minor = 0 ;
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
let cfg : _ P2p.message_config = { encoding ; versions }
|
||||||
|
|
||||||
|
let raw_encoding = P2p.Raw.encoding encoding
|
||||||
|
|
||||||
|
let pp_json ppf msg =
|
||||||
|
Format.pp_print_string ppf
|
||||||
|
(Data_encoding_ezjsonm.to_string (Data_encoding.Json.construct raw_encoding (Message msg)))
|
32
src/node/shell/distributed_db_message.mli
Normal file
32
src/node/shell/distributed_db_message.mli
Normal file
@ -0,0 +1,32 @@
(**************************************************************************)
(*                                                                        *)
(*    Copyright (c) 2014 - 2016.                                          *)
(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
(*                                                                        *)
(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
(*                                                                        *)
(**************************************************************************)

module Net_id = Store.Net_id

type t =

  | Get_current_branch of Net_id.t
  | Current_branch of Net_id.t * Block_hash.t list (* Block locator *)
  | Deactivate of Net_id.t

  | Get_current_head of Net_id.t
  | Current_head of Net_id.t * Block_hash.t * Operation_hash.t list

  | Get_block_headers of Net_id.t * Block_hash.t list
  | Block_header of Store.Block_header.t

  | Get_operations of Net_id.t * Operation_hash.t list
  | Operation of Store.Operation.t

  | Get_protocols of Protocol_hash.t list
  | Protocol of Tezos_compiler.Protocol.t

val cfg : t P2p.message_config

val pp_json : Format.formatter -> t -> unit
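Taken together, the variants split into pull requests (the Get_* cases) and the data that answers or advertises them. For illustration only (not part of the commit, and purely hypothetical), a consumer of this interface could classify messages like this:

(* Hypothetical helper: one-line description of a wire message. *)
let describe : Distributed_db_message.t -> string =
  let open Distributed_db_message in
  function
  | Get_current_branch _ -> "request: current branch"
  | Current_branch (_, locator) ->
      Printf.sprintf "push: branch (%d hashes)" (List.length locator)
  | Deactivate _ -> "notice: network deactivated"
  | Get_current_head _ -> "request: current head"
  | Current_head (_, _, mempool) ->
      Printf.sprintf "push: head (%d mempool operations)" (List.length mempool)
  | Get_block_headers (_, hs) ->
      Printf.sprintf "request: %d block headers" (List.length hs)
  | Block_header _ -> "push: block header"
  | Get_operations (_, ops) ->
      Printf.sprintf "request: %d operations" (List.length ops)
  | Operation _ -> "push: operation"
  | Get_protocols ps ->
      Printf.sprintf "request: %d protocols" (List.length ps)
  | Protocol _ -> "push: protocol"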
15
src/node/shell/distributed_db_metadata.ml
Normal file
@ -0,0 +1,15 @@
(**************************************************************************)
(*                                                                        *)
(*    Copyright (c) 2014 - 2016.                                          *)
(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
(*                                                                        *)
(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
(*                                                                        *)
(**************************************************************************)

type t = unit
let initial = ()
let encoding = Data_encoding.empty
let score () = 0.

let cfg : _ P2p.meta_config = { encoding ; initial ; score }
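This placeholder metadata config is handed to the P2p layer alongside the message config, as init_p2p does further down in this commit. A minimal sketch of that wiring (illustrative; config and limits stand for the usual P2p parameters):

(* Sketch only: pass the metadata and message configs to P2p.create,
   mirroring [init_p2p] in node.ml below. *)
let start_p2p config limits =
  P2p.create ~config ~limits
    Distributed_db_metadata.cfg
    Distributed_db_message.cfg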
11
src/node/shell/distributed_db_metadata.mli
Normal file
@ -0,0 +1,11 @@
(**************************************************************************)
(*                                                                        *)
(*    Copyright (c) 2014 - 2016.                                          *)
(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
(*                                                                        *)
(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
(*                                                                        *)
(**************************************************************************)

type t = unit
val cfg : t P2p.meta_config
@ -12,211 +12,76 @@ open Logging.Node.Worker
|
|||||||
|
|
||||||
let inject_operation validator ?force bytes =
|
let inject_operation validator ?force bytes =
|
||||||
let t =
|
let t =
|
||||||
match Store.Operation.of_bytes bytes with
|
match Data_encoding.Binary.of_bytes Store.Operation.encoding bytes with
|
||||||
| None -> failwith "Can't parse the operation"
|
| None -> failwith "Can't parse the operation"
|
||||||
| Some operation ->
|
| Some operation ->
|
||||||
Validator.get validator operation.shell.net_id >>=? fun net_validator ->
|
Validator.get
|
||||||
|
validator operation.shell.net_id >>=? fun net_validator ->
|
||||||
let pv = Validator.prevalidator net_validator in
|
let pv = Validator.prevalidator net_validator in
|
||||||
Prevalidator.inject_operation pv ?force operation in
|
Prevalidator.inject_operation pv ?force operation in
|
||||||
let hash = Operation_hash.hash_bytes [bytes] in
|
let hash = Operation_hash.hash_bytes [bytes] in
|
||||||
Lwt.return (hash, t)
|
Lwt.return (hash, t)
|
||||||
|
|
||||||
let inject_protocol state ?force:_ proto =
|
let inject_protocol state ?force:_ proto =
|
||||||
let proto_bytes = Store.Protocol.to_bytes proto in
|
let proto_bytes =
|
||||||
|
Data_encoding.Binary.to_bytes Store.Protocol.encoding proto in
|
||||||
let hash = Protocol_hash.hash_bytes [proto_bytes] in
|
let hash = Protocol_hash.hash_bytes [proto_bytes] in
|
||||||
let validation = Updater.compile hash proto >>= function
|
let validation =
|
||||||
| false -> Lwt.fail_with (Format.asprintf "Invalid protocol %a: compilation failed" Protocol_hash.pp_short hash)
|
Updater.compile hash proto >>= function
|
||||||
|
| false ->
|
||||||
|
failwith
|
||||||
|
"Compilation failed (%a)"
|
||||||
|
Protocol_hash.pp_short hash
|
||||||
| true ->
|
| true ->
|
||||||
State.Protocol.store state proto_bytes >>= function
|
State.Protocol.store state proto >>= function
|
||||||
| Ok None -> Lwt.fail_with "Previously registred protocol"
|
| false ->
|
||||||
| t -> t >|? ignore |> Lwt.return
|
failwith
|
||||||
|
"Previously registred protocol (%a)"
|
||||||
|
Protocol_hash.pp_short hash
|
||||||
|
| true -> return ()
|
||||||
in
|
in
|
||||||
Lwt.return (hash, validation)
|
Lwt.return (hash, validation)
|
||||||
|
|
||||||
let process_operation state validator bytes =
|
let inject_block validator ?force bytes =
|
||||||
State.Operation.store state bytes >>= function
|
Validator.inject_block validator ?force bytes >>=? fun (hash, block) ->
|
||||||
| Error _ | Ok None -> Lwt.return_unit
|
return (hash, (block >>=? fun _ -> return ()))
|
||||||
| Ok (Some (hash, op)) ->
|
|
||||||
lwt_log_info "process Operation %a (net: %a)"
|
|
||||||
Operation_hash.pp_short hash
|
|
||||||
Store.pp_net_id op.Store.shell.net_id >>= fun () ->
|
|
||||||
Validator.get validator op.shell.net_id >>= function
|
|
||||||
| Error _ -> Lwt.return_unit
|
|
||||||
| Ok net_validator ->
|
|
||||||
let prevalidator = Validator.prevalidator net_validator in
|
|
||||||
Prevalidator.register_operation prevalidator hash ;
|
|
||||||
Lwt.return_unit
|
|
||||||
|
|
||||||
let process_protocol state _validator bytes =
|
|
||||||
State.Protocol.store state bytes >>= function
|
|
||||||
| Error _ | Ok None -> Lwt.return_unit
|
|
||||||
| Ok (Some (hash, _proto)) ->
|
|
||||||
(* TODO: Store only pending protocols... *)
|
|
||||||
lwt_log_info "process Protocol %a" Protocol_hash.pp_short hash
|
|
||||||
|
|
||||||
let process_block state validator bytes =
|
|
||||||
State.Block.store state bytes >>= function
|
|
||||||
| Error _ | Ok None -> Lwt.return_unit
|
|
||||||
| Ok (Some (hash, block)) ->
|
|
||||||
lwt_log_notice "process Block %a (net: %a)"
|
|
||||||
Block_hash.pp_short hash
|
|
||||||
Store.pp_net_id block.Store.shell.net_id >>= fun () ->
|
|
||||||
lwt_debug "process Block %a (predecessor %a)"
|
|
||||||
Block_hash.pp_short hash
|
|
||||||
Block_hash.pp_short block.shell.predecessor >>= fun () ->
|
|
||||||
lwt_debug "process Block %a (timestamp %a)"
|
|
||||||
Block_hash.pp_short hash
|
|
||||||
Time.pp_hum block.shell.timestamp >>= fun () ->
|
|
||||||
Validator.notify_block validator hash block >>= fun () ->
|
|
||||||
Lwt.return_unit
|
|
||||||
|
|
||||||
let inject_block state validator ?(force = false) bytes =
|
|
||||||
let hash = Block_hash.hash_bytes [bytes] in
|
|
||||||
let validation =
|
|
||||||
State.Block.store state bytes >>=? function
|
|
||||||
| None -> failwith "Previously registred block."
|
|
||||||
| Some (hash, block) ->
|
|
||||||
lwt_log_notice "inject Block %a"
|
|
||||||
Block_hash.pp_short hash >>= fun () ->
|
|
||||||
Lwt.return (State.Net.get state block.Store.shell.net_id) >>=? fun net ->
|
|
||||||
State.Net.Blockchain.head net >>= fun head ->
|
|
||||||
if force
|
|
||||||
|| Fitness.compare head.fitness block.shell.fitness <= 0 then
|
|
||||||
Validator.get validator block.shell.net_id >>=? fun net ->
|
|
||||||
Validator.fetch_block net hash >>=? fun _ ->
|
|
||||||
return ()
|
|
||||||
else
|
|
||||||
failwith "Fitness is below the current one" in
|
|
||||||
Lwt.return (hash, validation)
|
|
||||||
|
|
||||||
let process state validator msg =
|
|
||||||
let open Tezos_p2p in
|
|
||||||
match msg with
|
|
||||||
|
|
||||||
| Discover_blocks (net_id, blocks) ->
|
|
||||||
lwt_log_info "process Discover_blocks" >>= fun () ->
|
|
||||||
if not (State.Net.is_active state net_id) then
|
|
||||||
Lwt.return_nil
|
|
||||||
else begin
|
|
||||||
match State.Net.get state net_id with
|
|
||||||
| Error _ -> Lwt.return_nil
|
|
||||||
| Ok net ->
|
|
||||||
State.Block.prefetch state net_id blocks ;
|
|
||||||
State.Net.Blockchain.find_new net blocks 50 >>= function
|
|
||||||
| Ok new_block_hashes ->
|
|
||||||
Lwt.return [Block_inventory (net_id, new_block_hashes)]
|
|
||||||
| Error _ -> Lwt.return_nil
|
|
||||||
end
|
|
||||||
|
|
||||||
| Block_inventory (net_id, blocks) ->
|
|
||||||
lwt_log_info "process Block_inventory" >>= fun () ->
|
|
||||||
if State.Net.is_active state net_id then
|
|
||||||
State.Block.prefetch state net_id blocks ;
|
|
||||||
Lwt.return_nil
|
|
||||||
|
|
||||||
| Get_blocks blocks ->
|
|
||||||
lwt_log_info "process Get_blocks" >>= fun () ->
|
|
||||||
Lwt_list.map_p (State.Block.raw_read state) blocks >>= fun blocks ->
|
|
||||||
let cons_block acc = function
|
|
||||||
| Some b -> Block b :: acc
|
|
||||||
| None -> acc in
|
|
||||||
Lwt.return (List.fold_left cons_block [] blocks)
|
|
||||||
|
|
||||||
| Block block ->
|
|
||||||
lwt_log_info "process Block" >>= fun () ->
|
|
||||||
process_block state validator block >>= fun _ ->
|
|
||||||
Lwt.return_nil
|
|
||||||
|
|
||||||
| Current_operations net_id ->
|
|
||||||
lwt_log_info "process Current_operations" >>= fun () ->
|
|
||||||
if not (State.Net.is_active state net_id) then
|
|
||||||
Lwt.return_nil
|
|
||||||
else begin
|
|
||||||
Validator.get validator net_id >>= function
|
|
||||||
| Error _ ->
|
|
||||||
Lwt.return_nil
|
|
||||||
| Ok net_validator ->
|
|
||||||
let pv = Validator.prevalidator net_validator in
|
|
||||||
let mempool = (fst (Prevalidator.operations pv)).applied in
|
|
||||||
Lwt.return [Operation_inventory (net_id, mempool)]
|
|
||||||
end
|
|
||||||
|
|
||||||
| Operation_inventory (net_id, ops) ->
|
|
||||||
lwt_log_info "process Operation_inventory" >>= fun () ->
|
|
||||||
if State.Net.is_active state net_id then
|
|
||||||
State.Operation.prefetch state net_id ops ;
|
|
||||||
Lwt.return_nil
|
|
||||||
|
|
||||||
| Get_operations ops ->
|
|
||||||
lwt_log_info "process Get_operations" >>= fun () ->
|
|
||||||
Lwt_list.map_p (State.Operation.raw_read state) ops >>= fun ops ->
|
|
||||||
let cons_operation acc = function
|
|
||||||
| Some op -> Operation op :: acc
|
|
||||||
| None -> acc in
|
|
||||||
Lwt.return (List.fold_left cons_operation [] ops)
|
|
||||||
|
|
||||||
| Operation content ->
|
|
||||||
lwt_log_info "process Operation" >>= fun () ->
|
|
||||||
process_operation state validator content >>= fun () ->
|
|
||||||
Lwt.return_nil
|
|
||||||
|
|
||||||
| Get_protocols protos ->
|
|
||||||
lwt_log_info "process Get_protocols" >>= fun () ->
|
|
||||||
Lwt_list.map_p (State.Protocol.raw_read state) protos >>= fun protos ->
|
|
||||||
let cons_protocol acc = function
|
|
||||||
| Some proto -> Protocol proto :: acc
|
|
||||||
| None -> acc in
|
|
||||||
Lwt.return (List.fold_left cons_protocol [] protos)
|
|
||||||
|
|
||||||
| Protocol content ->
|
|
||||||
lwt_log_info "process Protocol" >>= fun () ->
|
|
||||||
process_protocol state validator content >>= fun () ->
|
|
||||||
Lwt.return_nil
|
|
||||||
|
|
||||||
|
|
||||||
type t = {
|
type t = {
|
||||||
state: State.t ;
|
state: State.t ;
|
||||||
|
distributed_db: Distributed_db.t ;
|
||||||
validator: Validator.worker ;
|
validator: Validator.worker ;
|
||||||
|
global_db: Distributed_db.net ;
|
||||||
global_net: State.Net.t ;
|
global_net: State.Net.t ;
|
||||||
global_validator: Validator.t ;
|
global_validator: Validator.t ;
|
||||||
inject_block:
|
inject_block:
|
||||||
?force:bool -> MBytes.t -> (Block_hash.t * unit tzresult Lwt.t) Lwt.t ;
|
?force:bool -> MBytes.t ->
|
||||||
|
(Block_hash.t * unit tzresult Lwt.t) tzresult Lwt.t ;
|
||||||
inject_operation:
|
inject_operation:
|
||||||
?force:bool -> MBytes.t -> (Operation_hash.t * unit tzresult Lwt.t) Lwt.t ;
|
?force:bool -> MBytes.t ->
|
||||||
|
(Operation_hash.t * unit tzresult Lwt.t) Lwt.t ;
|
||||||
inject_protocol:
|
inject_protocol:
|
||||||
?force:bool -> Store.protocol -> (Protocol_hash.t * unit tzresult Lwt.t) Lwt.t ;
|
?force:bool -> Store.Protocol.t ->
|
||||||
p2p: Tezos_p2p.net ; (* For P2P RPCs *)
|
(Protocol_hash.t * unit tzresult Lwt.t) Lwt.t ;
|
||||||
|
p2p: Distributed_db.p2p ; (* For P2P RPCs *)
|
||||||
shutdown: unit -> unit Lwt.t ;
|
shutdown: unit -> unit Lwt.t ;
|
||||||
}
|
}
|
||||||
|
|
||||||
let request_operations net _net_id operations =
|
|
||||||
(* TODO improve the lookup strategy.
|
|
||||||
For now simply broadcast the request to all our neighbours. *)
|
|
||||||
Tezos_p2p.broadcast net (Get_operations operations)
|
|
||||||
|
|
||||||
let request_blocks net _net_id blocks =
|
|
||||||
(* TODO improve the lookup strategy.
|
|
||||||
For now simply broadcast the request to all our neighbours. *)
|
|
||||||
Tezos_p2p.broadcast net (Get_blocks blocks)
|
|
||||||
|
|
||||||
let request_protocols net protocols =
|
|
||||||
(* TODO improve the lookup strategy.
|
|
||||||
For now simply broadcast the request to all our neighbours. *)
|
|
||||||
Tezos_p2p.broadcast net (Get_protocols protocols)
|
|
||||||
|
|
||||||
let init_p2p net_params =
|
let init_p2p net_params =
|
||||||
match net_params with
|
match net_params with
|
||||||
| None ->
|
| None ->
|
||||||
lwt_log_notice "P2P layer is disabled" >>= fun () ->
|
lwt_log_notice "P2P layer is disabled" >>= fun () ->
|
||||||
Lwt.return Tezos_p2p.faked_network
|
Lwt.return P2p.faked_network
|
||||||
| Some (config, limits) ->
|
| Some (config, limits) ->
|
||||||
lwt_log_notice "bootstraping network..." >>= fun () ->
|
lwt_log_notice "bootstraping network..." >>= fun () ->
|
||||||
Tezos_p2p.create config limits >>= fun p2p ->
|
P2p.create
|
||||||
Lwt.async (fun () -> Tezos_p2p.maintain p2p) ;
|
~config ~limits
|
||||||
|
Distributed_db_metadata.cfg
|
||||||
|
Distributed_db_message.cfg >>= fun p2p ->
|
||||||
|
Lwt.async (fun () -> P2p.maintain p2p) ;
|
||||||
Lwt.return p2p
|
Lwt.return p2p
|
||||||
|
|
||||||
type config = {
|
type config = {
|
||||||
genesis: Store.genesis ;
|
genesis: State.Net.genesis ;
|
||||||
store_root: string ;
|
store_root: string ;
|
||||||
context_root: string ;
|
context_root: string ;
|
||||||
test_protocol: Protocol_hash.t option ;
|
test_protocol: Protocol_hash.t option ;
|
||||||
@ -226,68 +91,30 @@ type config = {
|
|||||||
|
|
||||||
let create { genesis ; store_root ; context_root ;
|
let create { genesis ; store_root ; context_root ;
|
||||||
test_protocol ; patch_context ; p2p = net_params } =
|
test_protocol ; patch_context ; p2p = net_params } =
|
||||||
lwt_debug "-> Node.create" >>= fun () ->
|
|
||||||
init_p2p net_params >>= fun p2p ->
|
init_p2p net_params >>= fun p2p ->
|
||||||
lwt_log_info "reading state..." >>= fun () ->
|
|
||||||
let request_operations = request_operations p2p in
|
|
||||||
let request_blocks = request_blocks p2p in
|
|
||||||
let request_protocols = request_protocols p2p in
|
|
||||||
State.read
|
State.read
|
||||||
~request_operations ~request_blocks ~request_protocols
|
~store_root ~context_root ?patch_context () >>=? fun state ->
|
||||||
~store_root ~context_root ~ttl:(48 * 3600) (* 2 days *)
|
let distributed_db = Distributed_db.create state p2p in
|
||||||
?patch_context () >>= fun state ->
|
let validator = Validator.create_worker state distributed_db in
|
||||||
let validator = Validator.create_worker p2p state in
|
State.Net.create state
|
||||||
let discoverer = Discoverer.create_worker p2p state in
|
?test_protocol
|
||||||
begin
|
~forked_network_ttl:(48 * 3600) (* 2 days *)
|
||||||
match State.Net.get state (Net genesis.Store.block) with
|
genesis >>= fun global_net ->
|
||||||
| Ok net -> return net
|
|
||||||
| Error _ -> State.Net.create state ?test_protocol genesis
|
|
||||||
end >>=? fun global_net ->
|
|
||||||
Validator.activate validator global_net >>= fun global_validator ->
|
Validator.activate validator global_net >>= fun global_validator ->
|
||||||
let cleanup () =
|
let global_db = Validator.net_db global_validator in
|
||||||
Tezos_p2p.shutdown p2p >>= fun () ->
|
|
||||||
Lwt.join [ Validator.shutdown validator ;
|
|
||||||
Discoverer.shutdown discoverer ] >>= fun () ->
|
|
||||||
State.store state
|
|
||||||
in
|
|
||||||
let canceler = Lwt_utils.Canceler.create () in
|
|
||||||
lwt_log_info "starting worker..." >>= fun () ->
|
|
||||||
let worker =
|
|
||||||
let handle_msg peer msg =
|
|
||||||
process state validator msg >>= fun msgs ->
|
|
||||||
List.iter
|
|
||||||
(fun msg -> ignore @@ Tezos_p2p.try_send p2p peer msg)
|
|
||||||
msgs;
|
|
||||||
Lwt.return_unit
|
|
||||||
in
|
|
||||||
let rec worker_loop () =
|
|
||||||
Lwt_utils.protect ~canceler begin fun () ->
|
|
||||||
Tezos_p2p.recv p2p >>= return
|
|
||||||
end >>=? fun (peer, msg) ->
|
|
||||||
handle_msg peer msg >>= fun () ->
|
|
||||||
worker_loop () in
|
|
||||||
worker_loop () >>= function
|
|
||||||
| Error [Lwt_utils.Canceled] | Ok () ->
|
|
||||||
cleanup ()
|
|
||||||
| Error err ->
|
|
||||||
lwt_log_error
|
|
||||||
"@[Unexpected error in worker@ %a@]"
|
|
||||||
pp_print_error err >>= fun () ->
|
|
||||||
cleanup ()
|
|
||||||
in
|
|
||||||
let shutdown () =
|
let shutdown () =
|
||||||
lwt_log_info "stopping worker..." >>= fun () ->
|
P2p.shutdown p2p >>= fun () ->
|
||||||
Lwt_utils.Canceler.cancel canceler >>= fun () ->
|
Validator.shutdown validator >>= fun () ->
|
||||||
worker >>= fun () ->
|
Lwt.return_unit
|
||||||
lwt_log_info "stopped"
|
|
||||||
in
|
in
|
||||||
lwt_debug "<- Node.create" >>= fun () ->
|
|
||||||
return {
|
return {
|
||||||
state ;
|
state ;
|
||||||
|
distributed_db ;
|
||||||
validator ;
|
validator ;
|
||||||
|
global_db ;
|
||||||
global_net ;
|
global_net ;
|
||||||
global_validator ;
|
global_validator ;
|
||||||
inject_block = inject_block state validator ;
|
inject_block = inject_block validator ;
|
||||||
inject_operation = inject_operation validator ;
|
inject_operation = inject_operation validator ;
|
||||||
inject_protocol = inject_protocol state ;
|
inject_protocol = inject_protocol state ;
|
||||||
p2p ;
|
p2p ;
|
||||||
@ -323,7 +150,7 @@ module RPC = struct
|
|||||||
test_network = block.test_network ;
|
test_network = block.test_network ;
|
||||||
}
|
}
|
||||||
|
|
||||||
let convert_block hash (block: State.Block.shell_header) = {
|
let convert_block hash (block: State.Block_header.shell_header) = {
|
||||||
net = block.net_id ;
|
net = block.net_id ;
|
||||||
hash = hash ;
|
hash = hash ;
|
||||||
predecessor = block.predecessor ;
|
predecessor = block.predecessor ;
|
||||||
@ -340,42 +167,99 @@ module RPC = struct
|
|||||||
let inject_protocol node = node.inject_protocol
|
let inject_protocol node = node.inject_protocol
|
||||||
|
|
||||||
let raw_block_info node hash =
|
let raw_block_info node hash =
|
||||||
State.Valid_block.read_exn node.state hash >|= convert
|
Distributed_db.read_block node.distributed_db hash >>= function
|
||||||
|
| Some (net_db, _block) ->
|
||||||
|
let net = Distributed_db.state net_db in
|
||||||
|
State.Valid_block.read_exn net hash >>= fun block ->
|
||||||
|
Lwt.return (convert block)
|
||||||
|
| None ->
|
||||||
|
Lwt.fail Not_found
|
||||||
|
|
||||||
let prevalidation_hash =
|
let prevalidation_hash =
|
||||||
Block_hash.of_b58check
|
Block_hash.of_b58check
|
||||||
"BLockPrevaLidationPrevaLidationPrevaLidationPrZ4mr6"
|
"BLockPrevaLidationPrevaLidationPrevaLidationPrZ4mr6"
|
||||||
|
|
||||||
let get_net node = function
|
let get_net node = function
|
||||||
| `Head _ | `Prevalidation -> node.global_validator, node.global_net
|
| `Genesis | `Head _ | `Prevalidation ->
|
||||||
|
node.global_validator, node.global_db
|
||||||
| `Test_head _ | `Test_prevalidation ->
|
| `Test_head _ | `Test_prevalidation ->
|
||||||
match Validator.test_validator node.global_validator with
|
match Validator.test_validator node.global_validator with
|
||||||
| None -> raise Not_found
|
| None -> raise Not_found
|
||||||
| Some v -> v
|
| Some v -> v
|
||||||
|
|
||||||
let get_pred node n (v: State.Valid_block.t) =
|
let get_validator node = function
|
||||||
if n <= 0 then Lwt.return v else
|
| `Genesis | `Head _ | `Prevalidation -> node.global_validator
|
||||||
let rec loop n h =
|
| `Test_head _ | `Test_prevalidation ->
|
||||||
if n <= 0 then Lwt.return h else
|
match Validator.test_validator node.global_validator with
|
||||||
State.Block.read_pred node.state h >>= function
|
| None -> raise Not_found
|
||||||
| None -> raise Not_found
|
| Some (v, _) -> v
|
||||||
| Some pred -> loop (n-1) pred in
|
|
||||||
loop n v.hash >>= fun h ->
|
let get_validator_per_hash node hash =
|
||||||
State.Valid_block.read node.state h >>= function
|
Distributed_db.read_block_exn
|
||||||
| None | Some (Error _) -> Lwt.fail Not_found (* error in the DB *)
|
node.distributed_db hash >>= fun (_net_db, block) ->
|
||||||
| Some (Ok b) -> Lwt.return b
|
if State.Net_id.equal
|
||||||
|
(State.Net.id node.global_net)
|
||||||
|
block.shell.net_id then
|
||||||
|
Lwt.return (Some (node.global_validator, node.global_db))
|
||||||
|
else
|
||||||
|
match Validator.test_validator node.global_validator with
|
||||||
|
| Some (test_validator, net_db)
|
||||||
|
when State.Net_id.equal
|
||||||
|
(State.Net.id (Validator.net_state test_validator))
|
||||||
|
block.shell.net_id ->
|
||||||
|
Lwt.return (Some (node.global_validator, net_db))
|
||||||
|
| _ -> Lwt.return_none
|
||||||
|
|
||||||
|
let read_valid_block node h =
|
||||||
|
Distributed_db.read_block node.distributed_db h >>= function
|
||||||
|
| None -> Lwt.return_none
|
||||||
|
| Some (_net_db, block) ->
|
||||||
|
State.Net.get node.state block.shell.net_id >>= function
|
||||||
|
| Error _ -> Lwt.return_none
|
||||||
|
| Ok net ->
|
||||||
|
State.Valid_block.read_exn net h >>= fun block ->
|
||||||
|
Lwt.return (Some block)
|
||||||
|
|
||||||
|
let read_valid_block_exn node h =
|
||||||
|
Distributed_db.read_block_exn
|
||||||
|
node.distributed_db h >>= fun (net_db, _block) ->
|
||||||
|
let net = Distributed_db.state net_db in
|
||||||
|
State.Valid_block.read_exn net h >>= fun block ->
|
||||||
|
Lwt.return block
|
||||||
|
|
||||||
|
let get_pred net_db n (v: State.Valid_block.t) =
|
||||||
|
let rec loop net_db n h =
|
||||||
|
if n <= 0 then
|
||||||
|
Lwt.return h
|
||||||
|
else
|
||||||
|
Distributed_db.Block_header.read net_db h >>= function
|
||||||
|
| None -> Lwt.fail Not_found
|
||||||
|
| Some { shell = { predecessor } } ->
|
||||||
|
loop net_db (n-1) predecessor in
|
||||||
|
if n <= 0 then
|
||||||
|
Lwt.return v
|
||||||
|
else
|
||||||
|
loop net_db n v.hash >>= fun hash ->
|
||||||
|
let net_state = Distributed_db.state net_db in
|
||||||
|
State.Valid_block.read_exn net_state hash
|
||||||
|
|
||||||
let block_info node (block: block) =
|
let block_info node (block: block) =
|
||||||
match block with
|
match block with
|
||||||
| `Genesis -> State.Net.Blockchain.genesis node.global_net >|= convert
|
| `Genesis ->
|
||||||
|
State.Valid_block.Current.genesis node.global_net >|= convert
|
||||||
| ( `Head n | `Test_head n ) as block ->
|
| ( `Head n | `Test_head n ) as block ->
|
||||||
let _, net = get_net node block in
|
let validator = get_validator node block in
|
||||||
State.Net.Blockchain.head net >>= get_pred node n >|= convert
|
let net_db = Validator.net_db validator in
|
||||||
| `Hash h -> State.Valid_block.read_exn node.state h >|= convert
|
let net_state = Validator.net_state validator in
|
||||||
|
State.Valid_block.Current.head net_state >>= fun head ->
|
||||||
|
get_pred net_db n head >|= convert
|
||||||
|
| `Hash h ->
|
||||||
|
read_valid_block_exn node h >|= convert
|
||||||
| ( `Prevalidation | `Test_prevalidation ) as block ->
|
| ( `Prevalidation | `Test_prevalidation ) as block ->
|
||||||
let validator, net = get_net node block in
|
let validator = get_validator node block in
|
||||||
let pv = Validator.prevalidator validator in
|
let pv = Validator.prevalidator validator in
|
||||||
State.Net.Blockchain.head net >>= fun head ->
|
let net_state = Validator.net_state validator in
|
||||||
|
State.Valid_block.Current.head net_state >>= fun head ->
|
||||||
let ctxt = Prevalidator.context pv in
|
let ctxt = Prevalidator.context pv in
|
||||||
let (module Proto) = Prevalidator.protocol pv in
|
let (module Proto) = Prevalidator.protocol pv in
|
||||||
Proto.fitness ctxt >|= fun fitness ->
|
Proto.fitness ctxt >|= fun fitness ->
|
||||||
@ -388,16 +272,19 @@ module RPC = struct
|
|||||||
let get_context node block =
|
let get_context node block =
|
||||||
match block with
|
match block with
|
||||||
| `Genesis ->
|
| `Genesis ->
|
||||||
State.Net.Blockchain.genesis node.global_net >>= fun { context } ->
|
State.Valid_block.Current.genesis node.global_net >>= fun block ->
|
||||||
Lwt.return (Some context)
|
Lwt.return (Some block.context)
|
||||||
| ( `Head n | `Test_head n ) as block->
|
| ( `Head n | `Test_head n ) as block ->
|
||||||
let _, net = get_net node block in
|
let validator = get_validator node block in
|
||||||
State.Net.Blockchain.head net >>= get_pred node n >>= fun { context } ->
|
let net_state = Validator.net_state validator in
|
||||||
|
let net_db = Validator.net_db validator in
|
||||||
|
State.Valid_block.Current.head net_state >>= fun head ->
|
||||||
|
get_pred net_db n head >>= fun { context } ->
|
||||||
Lwt.return (Some context)
|
Lwt.return (Some context)
|
||||||
| `Hash hash-> begin
|
| `Hash hash-> begin
|
||||||
State.Valid_block.read node.state hash >|= function
|
read_valid_block node hash >|= function
|
||||||
| None | Some (Error _) -> None
|
| None -> None
|
||||||
| Some (Ok { context }) -> Some context
|
| Some { context } -> Some context
|
||||||
end
|
end
|
||||||
| ( `Prevalidation | `Test_prevalidation ) as block ->
|
| ( `Prevalidation | `Test_prevalidation ) as block ->
|
||||||
let validator, _net = get_net node block in
|
let validator, _net = get_net node block in
|
||||||
@ -407,11 +294,14 @@ module RPC = struct
|
|||||||
let operations node block =
|
let operations node block =
|
||||||
match block with
|
match block with
|
||||||
| `Genesis ->
|
| `Genesis ->
|
||||||
State.Net.Blockchain.genesis node.global_net >>= fun { operations } ->
|
State.Valid_block.Current.genesis node.global_net >>= fun { operations } ->
|
||||||
Lwt.return operations
|
Lwt.return operations
|
||||||
| ( `Head n | `Test_head n ) as block ->
|
| ( `Head n | `Test_head n ) as block ->
|
||||||
let _, net = get_net node block in
|
let validator = get_validator node block in
|
||||||
State.Net.Blockchain.head net >>= get_pred node n >>= fun { operations } ->
|
let net_state = Validator.net_state validator in
|
||||||
|
let net_db = Validator.net_db validator in
|
||||||
|
State.Valid_block.Current.head net_state >>= fun head ->
|
||||||
|
get_pred net_db n head >>= fun { operations } ->
|
||||||
Lwt.return operations
|
Lwt.return operations
|
||||||
| (`Prevalidation | `Test_prevalidation) as block ->
|
| (`Prevalidation | `Test_prevalidation) as block ->
|
||||||
let validator, _net = get_net node block in
|
let validator, _net = get_net node block in
|
||||||
@ -419,14 +309,16 @@ module RPC = struct
|
|||||||
let { Updater.applied }, _ = Prevalidator.operations pv in
|
let { Updater.applied }, _ = Prevalidator.operations pv in
|
||||||
Lwt.return applied
|
Lwt.return applied
|
||||||
| `Hash hash->
|
| `Hash hash->
|
||||||
State.Block.read node.state hash >|= function
|
read_valid_block node hash >|= function
|
||||||
| None -> []
|
| None -> []
|
||||||
| Some { Time.data = { shell = { operations }}} -> operations
|
| Some { operations } -> operations
|
||||||
|
|
||||||
let operation_content node hash =
|
let operation_content node hash =
|
||||||
State.Operation.read node.state hash
|
Distributed_db.read_operation node.distributed_db hash >>= function
|
||||||
|
| None -> Lwt.return_none
|
||||||
|
| Some (_, op) -> Lwt.return (Some op)
|
||||||
|
|
||||||
let pending_operations node block =
|
let pending_operations node (block: block) =
|
||||||
match block with
|
match block with
|
||||||
| ( `Head 0 | `Prevalidation
|
| ( `Head 0 | `Prevalidation
|
||||||
| `Test_head 0 | `Test_prevalidation ) as block ->
|
| `Test_head 0 | `Test_prevalidation ) as block ->
|
||||||
@ -434,50 +326,36 @@ module RPC = struct
|
|||||||
let pv = Validator.prevalidator validator in
|
let pv = Validator.prevalidator validator in
|
||||||
Lwt.return (Prevalidator.operations pv)
|
Lwt.return (Prevalidator.operations pv)
|
||||||
| ( `Head n | `Test_head n ) as block ->
|
| ( `Head n | `Test_head n ) as block ->
|
||||||
let _validator, net = get_net node block in
|
let validator = get_validator node block in
|
||||||
State.Net.Blockchain.head net >>= get_pred node n >>= fun b ->
|
let prevalidator = Validator.prevalidator validator in
|
||||||
State.Net.Mempool.for_block net b >|= fun ops ->
|
let net_state = Validator.net_state validator in
|
||||||
|
let net_db = Validator.net_db validator in
|
||||||
|
State.Valid_block.Current.head net_state >>= fun head ->
|
||||||
|
get_pred net_db n head >>= fun b ->
|
||||||
|
Prevalidator.pending ~block:b prevalidator >|= fun ops ->
|
||||||
Updater.empty_result, ops
|
Updater.empty_result, ops
|
||||||
| `Genesis ->
|
| `Genesis ->
|
||||||
let net = node.global_net in
|
let net = node.global_net in
|
||||||
State.Net.Blockchain.genesis net >>= fun b ->
|
State.Valid_block.Current.genesis net >>= fun b ->
|
||||||
State.Net.Mempool.for_block net b >|= fun ops ->
|
let validator = get_validator node `Genesis in
|
||||||
|
let prevalidator = Validator.prevalidator validator in
|
||||||
|
Prevalidator.pending ~block:b prevalidator >|= fun ops ->
|
||||||
Updater.empty_result, ops
|
Updater.empty_result, ops
|
||||||
| `Hash h ->
|
| `Hash h -> begin
|
||||||
begin
|
get_validator_per_hash node h >>= function
|
||||||
let nets = State.Net.active node.state in
|
|
||||||
Lwt_list.filter_map_p
|
|
||||||
(fun net ->
|
|
||||||
State.Net.Blockchain.head net >|= fun head ->
|
|
||||||
if Block_hash.equal h head.hash then Some (net, head) else None)
|
|
||||||
nets >>= function
|
|
||||||
| [] -> Lwt.return_none
|
|
||||||
| [net] -> Lwt.return (Some net)
|
|
||||||
| nets ->
|
|
||||||
Lwt_list.filter_p
|
|
||||||
(fun (net, (head: State.Valid_block.t)) ->
|
|
||||||
State.Net.Blockchain.genesis net >|= fun genesis ->
|
|
||||||
not (Block_hash.equal genesis.hash head.hash))
|
|
||||||
nets >>= function
|
|
||||||
| [net] -> Lwt.return (Some net)
|
|
||||||
| _ -> Lwt.fail Not_found
|
|
||||||
end >>= function
|
|
||||||
| Some (net, _head) ->
|
|
||||||
Validator.get_exn
|
|
||||||
node.validator (State.Net.id net) >>= fun net_validator ->
|
|
||||||
let pv = Validator.prevalidator net_validator in
|
|
||||||
Lwt.return (Prevalidator.operations pv)
|
|
||||||
| None ->
|
| None ->
|
||||||
State.Valid_block.read_exn node.state h >>= fun b ->
|
Lwt.return (Updater.empty_result, Operation_hash.Set.empty)
|
||||||
if not (State.Net.is_active node.state b.net_id) then
|
| Some (validator, net_db) ->
|
||||||
raise Not_found ;
|
let net_state = Distributed_db.state net_db in
|
||||||
match State.Net.get node.state b.net_id with
|
let prevalidator = Validator.prevalidator validator in
|
||||||
| Error _ -> raise Not_found
|
State.Valid_block.read_exn net_state h >>= fun block ->
|
||||||
| Ok net ->
|
Prevalidator.pending ~block prevalidator >|= fun ops ->
|
||||||
State.Net.Mempool.for_block net b >|= fun ops ->
|
Updater.empty_result, ops
|
||||||
Updater.empty_result, ops
|
end
|
||||||
|
|
||||||
let protocols { state } = State.Protocol.keys state
|
let protocols { state } =
|
||||||
|
State.Protocol.list state >>= fun set ->
|
||||||
|
Lwt.return (Protocol_hash.Set.elements set)
|
||||||
|
|
||||||
let protocol_content node hash =
|
let protocol_content node hash =
|
||||||
State.Protocol.read node.state hash
|
State.Protocol.read node.state hash
|
||||||
@ -487,28 +365,32 @@ module RPC = struct
|
|||||||
match block with
|
match block with
|
||||||
| `Genesis ->
|
| `Genesis ->
|
||||||
let net = node.global_net in
|
let net = node.global_net in
|
||||||
State.Net.Blockchain.genesis net >>= return
|
State.Valid_block.Current.genesis net >>= return
|
||||||
| ( `Head 0 | `Prevalidation
|
| ( `Head 0 | `Prevalidation
|
||||||
| `Test_head 0 | `Test_prevalidation ) as block ->
|
| `Test_head 0 | `Test_prevalidation ) as block ->
|
||||||
let _validator, net = get_net node block in
|
let validator = get_validator node block in
|
||||||
State.Net.Blockchain.head net >>= return
|
let net_state = Validator.net_state validator in
|
||||||
|
State.Valid_block.Current.head net_state >>= return
|
||||||
| `Head n | `Test_head n as block -> begin
|
| `Head n | `Test_head n as block -> begin
|
||||||
let _validator, net = get_net node block in
|
let validator = get_validator node block in
|
||||||
State.Net.Blockchain.head net >>= get_pred node n >>= return
|
let net_state = Validator.net_state validator in
|
||||||
|
let net_db = Validator.net_db validator in
|
||||||
|
State.Valid_block.Current.head net_state >>= fun head ->
|
||||||
|
get_pred net_db n head >>= return
|
||||||
end
|
end
|
||||||
| `Hash hash -> begin
|
| `Hash hash ->
|
||||||
State.Valid_block.read node.state hash >>= function
|
read_valid_block node hash >>= function
|
||||||
| None -> Lwt.return (error_exn Not_found)
|
| None -> Lwt.return (error_exn Not_found)
|
||||||
| Some data -> Lwt.return data
|
| Some data -> return data
|
||||||
end
|
|
||||||
end >>=? fun { hash ; context ; protocol } ->
|
end >>=? fun { hash ; context ; protocol } ->
|
||||||
begin
|
begin
|
||||||
match protocol with
|
match protocol with
|
||||||
| None -> failwith "Unknown protocol version"
|
| None -> failwith "Unknown protocol version"
|
||||||
| Some protocol -> return protocol
|
| Some protocol -> return protocol
|
||||||
end >>=? function (module Proto) as protocol ->
|
end >>=? function (module Proto) as protocol ->
|
||||||
|
let net_db = Validator.net_db node.global_validator in
|
||||||
Prevalidator.preapply
|
Prevalidator.preapply
|
||||||
node.state context protocol hash timestamp sort ops >>=? fun (ctxt, r) ->
|
net_db context protocol hash timestamp sort ops >>=? fun (ctxt, r) ->
|
||||||
Proto.fitness ctxt >>= fun fitness ->
|
Proto.fitness ctxt >>= fun fitness ->
|
||||||
return (fitness, r)
|
return (fitness, r)
|
||||||
|
|
||||||
@ -536,18 +418,31 @@ module RPC = struct
|
|||||||
Lwt.return (Some (RPC.map (fun _ -> ()) dir))
|
Lwt.return (Some (RPC.map (fun _ -> ()) dir))
|
||||||
|
|
||||||
let heads node =
|
let heads node =
|
||||||
State.Valid_block.known_heads node.state >|= Block_hash_map.map convert
|
State.Valid_block.known_heads node.global_net >>= fun heads ->
|
||||||
|
begin
|
||||||
|
match Validator.test_validator node.global_validator with
|
||||||
|
| None -> Lwt.return_nil
|
||||||
|
| Some (_, net_db) ->
|
||||||
|
State.Valid_block.known_heads (Distributed_db.state net_db)
|
||||||
|
end >>= fun test_heads ->
|
||||||
|
let map =
|
||||||
|
List.fold_left
|
||||||
|
(fun map block ->
|
||||||
|
Block_hash.Map.add
|
||||||
|
block.State.Valid_block.hash (convert block) map)
|
||||||
|
Block_hash.Map.empty (test_heads @ heads) in
|
||||||
|
Lwt.return map
|
||||||
|
|
||||||
let predecessors state ignored len head =
|
let predecessors net_state ignored len head =
|
||||||
try
|
try
|
||||||
let rec loop acc len hash =
|
let rec loop acc len hash =
|
||||||
State.Valid_block.read_exn state hash >>= fun block ->
|
State.Valid_block.read_exn net_state hash >>= fun block ->
|
||||||
let bi = convert block in
|
let bi = convert block in
|
||||||
if Block_hash.equal bi.predecessor hash then
|
if Block_hash.equal bi.predecessor hash then
|
||||||
Lwt.return (List.rev (bi :: acc))
|
Lwt.return (List.rev (bi :: acc))
|
||||||
else begin
|
else begin
|
||||||
if len = 0
|
if len = 0
|
||||||
|| Block_hash_set.mem hash ignored then
|
|| Block_hash.Set.mem hash ignored then
|
||||||
Lwt.return (List.rev acc)
|
Lwt.return (List.rev acc)
|
||||||
else
|
else
|
||||||
loop (bi :: acc) (len-1) bi.predecessor
|
loop (bi :: acc) (len-1) bi.predecessor
|
||||||
@ -558,36 +453,37 @@ module RPC = struct
|
|||||||
let list node len heads =
|
let list node len heads =
|
||||||
Lwt_list.fold_left_s
|
Lwt_list.fold_left_s
|
||||||
(fun (ignored, acc) head ->
|
(fun (ignored, acc) head ->
|
||||||
predecessors node.state ignored len head >|= fun predecessors ->
|
Distributed_db.read_block_exn
|
||||||
|
node.distributed_db head >>= fun (net_db, _block) ->
|
||||||
|
let net_state = Distributed_db.state net_db in
|
||||||
|
predecessors net_state ignored len head >|= fun predecessors ->
|
||||||
let ignored =
|
let ignored =
|
||||||
List.fold_right
|
List.fold_right
|
||||||
(fun x s -> Block_hash_set.add x.hash s)
|
(fun x s -> Block_hash.Set.add x.hash s)
|
||||||
predecessors ignored in
|
predecessors ignored in
|
||||||
ignored, predecessors :: acc
|
ignored, predecessors :: acc
|
||||||
)
|
)
|
||||||
(Block_hash_set.empty, [])
|
(Block_hash.Set.empty, [])
|
||||||
heads >|= fun (_, blocks) ->
|
heads >|= fun (_, blocks) ->
|
||||||
List.rev blocks
|
List.rev blocks
|
||||||
|
|
||||||
let block_watcher node =
|
let block_watcher node =
|
||||||
let stream, shutdown = State.Block.create_watcher node.state in
|
let stream, shutdown = Distributed_db.watch_block node.distributed_db in
|
||||||
Lwt_stream.map
|
Lwt_stream.map
|
||||||
(fun (hash, block) -> convert_block hash block.Store.shell)
|
(fun (hash, block) -> convert_block hash block.Store.Block_header.shell)
|
||||||
stream,
|
stream,
|
||||||
shutdown
|
shutdown
|
||||||
|
|
||||||
let valid_block_watcher node =
|
let valid_block_watcher node =
|
||||||
State.Valid_block.create_watcher node.state >|= fun (stream, shutdown) ->
|
let stream, shutdown = Validator.watcher node.validator in
|
||||||
Lwt_stream.map
|
Lwt_stream.map (fun block -> convert block) stream,
|
||||||
(fun block -> convert block)
|
|
||||||
stream,
|
|
||||||
shutdown
|
shutdown
|
||||||
|
|
||||||
let operation_watcher node =
|
let operation_watcher node =
|
||||||
State.Operation.create_watcher node.state
|
Distributed_db.watch_operation node.distributed_db
|
||||||
|
|
||||||
let protocol_watcher node =
|
let protocol_watcher node =
|
||||||
State.Protocol.create_watcher node.state
|
Distributed_db.watch_protocol node.distributed_db
|
||||||
|
|
||||||
let validate node net_id block =
|
let validate node net_id block =
|
||||||
Validator.get node.validator net_id >>=? fun net_v ->
|
Validator.get node.validator net_id >>=? fun net_v ->
|
||||||
@ -596,54 +492,54 @@ module RPC = struct
|
|||||||
|
|
||||||
module Network = struct
|
module Network = struct
|
||||||
let stat (node : t) =
|
let stat (node : t) =
|
||||||
Tezos_p2p.RPC.stat node.p2p
|
P2p.RPC.stat node.p2p
|
||||||
|
|
||||||
let watch (node : t) =
|
let watch (node : t) =
|
||||||
Tezos_p2p.RPC.watch node.p2p
|
P2p.RPC.watch node.p2p
|
||||||
|
|
||||||
let connect (node : t) =
|
let connect (node : t) =
|
||||||
Tezos_p2p.RPC.connect node.p2p
|
P2p.RPC.connect node.p2p
|
||||||
|
|
||||||
module Connection = struct
|
module Connection = struct
|
||||||
let info (node : t) =
|
let info (node : t) =
|
||||||
Tezos_p2p.RPC.Connection.info node.p2p
|
P2p.RPC.Connection.info node.p2p
|
||||||
|
|
||||||
let kick (node : t) =
|
let kick (node : t) =
|
||||||
Tezos_p2p.RPC.Connection.kick node.p2p
|
P2p.RPC.Connection.kick node.p2p
|
||||||
|
|
||||||
let list (node : t) =
|
let list (node : t) =
|
||||||
Tezos_p2p.RPC.Connection.list node.p2p
|
P2p.RPC.Connection.list node.p2p
|
||||||
|
|
||||||
let count (node : t) =
|
let count (node : t) =
|
||||||
Tezos_p2p.RPC.Connection.count node.p2p
|
P2p.RPC.Connection.count node.p2p
|
||||||
end
|
end
|
||||||
|
|
||||||
module Point = struct
|
module Point = struct
|
||||||
let info (node : t) =
|
let info (node : t) =
|
||||||
Tezos_p2p.RPC.Point.info node.p2p
|
P2p.RPC.Point.info node.p2p
|
||||||
|
|
||||||
let infos (node : t) restrict =
|
let infos (node : t) restrict =
|
||||||
Tezos_p2p.RPC.Point.infos ~restrict node.p2p
|
P2p.RPC.Point.infos ~restrict node.p2p
|
||||||
|
|
||||||
let events (node : t) =
|
let events (node : t) =
|
||||||
Tezos_p2p.RPC.Point.events node.p2p
|
P2p.RPC.Point.events node.p2p
|
||||||
|
|
||||||
let watch (node : t) =
|
let watch (node : t) =
|
||||||
Tezos_p2p.RPC.Point.watch node.p2p
|
P2p.RPC.Point.watch node.p2p
|
||||||
end
|
end
|
||||||
|
|
||||||
module Peer_id = struct
|
module Peer_id = struct
|
||||||
let info (node : t) =
|
let info (node : t) =
|
||||||
Tezos_p2p.RPC.Peer_id.info node.p2p
|
P2p.RPC.Peer_id.info node.p2p
|
||||||
|
|
||||||
let infos (node : t) restrict =
|
let infos (node : t) restrict =
|
||||||
Tezos_p2p.RPC.Peer_id.infos ~restrict node.p2p
|
P2p.RPC.Peer_id.infos ~restrict node.p2p
|
||||||
|
|
||||||
let events (node : t) =
|
let events (node : t) =
|
||||||
Tezos_p2p.RPC.Peer_id.events node.p2p
|
P2p.RPC.Peer_id.events node.p2p
|
||||||
|
|
||||||
let watch (node : t) =
|
let watch (node : t) =
|
||||||
Tezos_p2p.RPC.Peer_id.watch node.p2p
|
P2p.RPC.Peer_id.watch node.p2p
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@ -10,7 +10,7 @@
|
|||||||
type t
|
type t
|
||||||
|
|
||||||
type config = {
|
type config = {
|
||||||
genesis: Store.genesis ;
|
genesis: State.Net.genesis ;
|
||||||
store_root: string ;
|
store_root: string ;
|
||||||
context_root: string ;
|
context_root: string ;
|
||||||
test_protocol: Protocol_hash.t option ;
|
test_protocol: Protocol_hash.t option ;
|
||||||
@ -26,19 +26,22 @@ module RPC : sig
|
|||||||
type block_info = Node_rpc_services.Blocks.block_info
|
type block_info = Node_rpc_services.Blocks.block_info
|
||||||
|
|
||||||
val inject_block:
|
val inject_block:
|
||||||
t -> ?force:bool -> MBytes.t -> (Block_hash.t * unit tzresult Lwt.t) Lwt.t
|
t -> ?force:bool -> MBytes.t ->
|
||||||
|
(Block_hash.t * unit tzresult Lwt.t) tzresult Lwt.t
|
||||||
val inject_operation:
|
val inject_operation:
|
||||||
t -> ?force:bool -> MBytes.t -> (Operation_hash.t * unit tzresult Lwt.t) Lwt.t
|
t -> ?force:bool -> MBytes.t ->
|
||||||
|
(Operation_hash.t * unit tzresult Lwt.t) Lwt.t
|
||||||
val inject_protocol:
|
val inject_protocol:
|
||||||
t -> ?force:bool -> Store.protocol -> (Protocol_hash.t * unit tzresult Lwt.t) Lwt.t
|
t -> ?force:bool -> Tezos_compiler.Protocol.t ->
|
||||||
|
(Protocol_hash.t * unit tzresult Lwt.t) Lwt.t
|
||||||
|
|
||||||
val raw_block_info:
|
val raw_block_info:
|
||||||
t -> Block_hash.t -> block_info Lwt.t
|
t -> Block_hash.t -> block_info Lwt.t
|
||||||
val block_watcher:
|
val block_watcher:
|
||||||
t -> block_info Lwt_stream.t * Watcher.stopper
|
t -> block_info Lwt_stream.t * Watcher.stopper
|
||||||
val valid_block_watcher:
|
val valid_block_watcher:
|
||||||
t -> (block_info Lwt_stream.t * Watcher.stopper) Lwt.t
|
t -> (block_info Lwt_stream.t * Watcher.stopper)
|
||||||
val heads: t -> block_info Block_hash_map.t Lwt.t
|
val heads: t -> block_info Block_hash.Map.t Lwt.t
|
||||||
|
|
||||||
val list:
|
val list:
|
||||||
t -> int -> Block_hash.t list -> block_info list list Lwt.t
|
t -> int -> Block_hash.t list -> block_info list list Lwt.t
|
||||||
@ -49,19 +52,19 @@ module RPC : sig
|
|||||||
val operations:
|
val operations:
|
||||||
t -> block -> Operation_hash.t list Lwt.t
|
t -> block -> Operation_hash.t list Lwt.t
|
||||||
val operation_content:
|
val operation_content:
|
||||||
t -> Operation_hash.t -> Store.operation tzresult Time.timed_data option Lwt.t
|
t -> Operation_hash.t -> Store.Operation.t option Lwt.t
|
||||||
val operation_watcher:
|
val operation_watcher:
|
||||||
t -> (Operation_hash.t * Store.operation) Lwt_stream.t * Watcher.stopper
|
t -> (Operation_hash.t * Store.Operation.t) Lwt_stream.t * Watcher.stopper
|
||||||
|
|
||||||
val pending_operations:
|
val pending_operations:
|
||||||
t -> block -> (error Updater.preapply_result * Operation_hash_set.t) Lwt.t
|
t -> block -> (error Updater.preapply_result * Operation_hash.Set.t) Lwt.t
|
||||||
|
|
||||||
val protocols:
|
val protocols:
|
||||||
t -> Protocol_hash.t list Lwt.t
|
t -> Protocol_hash.t list Lwt.t
|
||||||
val protocol_content:
|
val protocol_content:
|
||||||
t -> Protocol_hash.t -> Store.protocol tzresult Time.timed_data option Lwt.t
|
t -> Protocol_hash.t -> Tezos_compiler.Protocol.t tzresult Lwt.t
|
||||||
val protocol_watcher:
|
val protocol_watcher:
|
||||||
t -> (Protocol_hash.t * Store.protocol) Lwt_stream.t * Watcher.stopper
|
t -> (Protocol_hash.t * Tezos_compiler.Protocol.t) Lwt_stream.t * Watcher.stopper
|
||||||
|
|
||||||
val context_dir:
|
val context_dir:
|
||||||
t -> block -> 'a RPC.directory option Lwt.t
|
t -> block -> 'a RPC.directory option Lwt.t
|
||||||
@ -72,7 +75,7 @@ module RPC : sig
|
|||||||
Operation_hash.t list ->
|
Operation_hash.t list ->
|
||||||
(Protocol.fitness * error Updater.preapply_result) tzresult Lwt.t
|
(Protocol.fitness * error Updater.preapply_result) tzresult Lwt.t
|
||||||
|
|
||||||
val validate: t -> State.net_id -> Block_hash.t -> unit tzresult Lwt.t
|
val validate: t -> State.Net_id.t -> Block_hash.t -> unit tzresult Lwt.t
|
||||||
|
|
||||||
val context_dir:
|
val context_dir:
|
||||||
t -> block -> 'a RPC.directory option Lwt.t
|
t -> block -> 'a RPC.directory option Lwt.t
|
||||||
|
@ -120,27 +120,27 @@ let create_delayed_stream
|
|||||||
let stream, push = Lwt_stream.create () in
|
let stream, push = Lwt_stream.create () in
|
||||||
let current_blocks =
|
let current_blocks =
|
||||||
ref (List.fold_left
|
ref (List.fold_left
|
||||||
(fun acc h -> Block_hash_set.add h acc)
|
(fun acc h -> Block_hash.Set.add h acc)
|
||||||
Block_hash_set.empty requested_heads) in
|
Block_hash.Set.empty requested_heads) in
|
||||||
let next_future_block, is_futur_block,
|
let next_future_block, is_futur_block,
|
||||||
insert_future_block, pop_future_block =
|
insert_future_block, pop_future_block =
|
||||||
let future_blocks = ref [] in (* FIXME *)
|
let future_blocks = ref [] in (* FIXME *)
|
||||||
let future_blocks_set = ref Block_hash_set.empty in
|
let future_blocks_set = ref Block_hash.Set.empty in
|
||||||
let next () =
|
let next () =
|
||||||
match !future_blocks with
|
match !future_blocks with
|
||||||
| [] -> None
|
| [] -> None
|
||||||
| bi :: _ -> Some bi
|
| bi :: _ -> Some bi
|
||||||
and mem hash = Block_hash_set.mem hash !future_blocks_set
|
and mem hash = Block_hash.Set.mem hash !future_blocks_set
|
||||||
and insert bi =
|
and insert bi =
|
||||||
future_blocks := insert_future_block bi !future_blocks ;
|
future_blocks := insert_future_block bi !future_blocks ;
|
||||||
future_blocks_set :=
|
future_blocks_set :=
|
||||||
Block_hash_set.add bi.hash !future_blocks_set
|
Block_hash.Set.add bi.hash !future_blocks_set
|
||||||
and pop time =
|
and pop time =
|
||||||
match !future_blocks with
|
match !future_blocks with
|
||||||
| {timestamp} as bi :: rest when Time.(timestamp <= time) ->
|
| {timestamp} as bi :: rest when Time.(timestamp <= time) ->
|
||||||
future_blocks := rest ;
|
future_blocks := rest ;
|
||||||
future_blocks_set :=
|
future_blocks_set :=
|
||||||
Block_hash_set.remove bi.hash !future_blocks_set ;
|
Block_hash.Set.remove bi.hash !future_blocks_set ;
|
||||||
Some bi
|
Some bi
|
||||||
| _ -> None in
|
| _ -> None in
|
||||||
next, mem, insert, pop in
|
next, mem, insert, pop in
|
||||||
@ -168,7 +168,7 @@ let create_delayed_stream
|
|||||||
lwt_debug "WWW worker_loop Some" >>= fun () ->
|
lwt_debug "WWW worker_loop Some" >>= fun () ->
|
||||||
begin
|
begin
|
||||||
if not filtering
|
if not filtering
|
||||||
|| Block_hash_set.mem bi.predecessor !current_blocks
|
|| Block_hash.Set.mem bi.predecessor !current_blocks
|
||||||
|| is_futur_block bi.predecessor
|
|| is_futur_block bi.predecessor
|
||||||
then begin
|
then begin
|
||||||
let time = Time.(add (now ()) (Int64.of_int ~-delay)) in
|
let time = Time.(add (now ()) (Int64.of_int ~-delay)) in
|
||||||
@ -177,8 +177,8 @@ let create_delayed_stream
|
|||||||
Lwt.return_unit
|
Lwt.return_unit
|
||||||
end else begin
|
end else begin
|
||||||
current_blocks :=
|
current_blocks :=
|
||||||
Block_hash_set.remove bi.predecessor !current_blocks
|
Block_hash.Set.remove bi.predecessor !current_blocks
|
||||||
|> Block_hash_set.add bi.hash ;
|
|> Block_hash.Set.add bi.hash ;
|
||||||
push (Some [[filter_bi include_ops bi]]) ;
|
push (Some [[filter_bi include_ops bi]]) ;
|
||||||
Lwt.return_unit
|
Lwt.return_unit
|
||||||
end
|
end
|
||||||
@ -219,7 +219,7 @@ let list_blocks
|
|||||||
match heads with
|
match heads with
|
||||||
| None ->
|
| None ->
|
||||||
Node.RPC.heads node >>= fun heads ->
|
Node.RPC.heads node >>= fun heads ->
|
||||||
let heads = List.map snd (Block_hash_map.bindings heads) in
|
let heads = List.map snd (Block_hash.Map.bindings heads) in
|
||||||
let heads =
|
let heads =
|
||||||
match min_date with
|
match min_date with
|
||||||
| None -> heads
|
| None -> heads
|
||||||
@ -271,7 +271,7 @@ let list_blocks
|
|||||||
requested_blocks in
|
requested_blocks in
|
||||||
RPC.Answer.return infos
|
RPC.Answer.return infos
|
||||||
else begin
|
else begin
|
||||||
Node.RPC.valid_block_watcher node >>= fun (bi_stream, stopper) ->
|
let (bi_stream, stopper) = Node.RPC.valid_block_watcher node in
|
||||||
let stream =
|
let stream =
|
||||||
match delay with
|
match delay with
|
||||||
| None ->
|
| None ->
|
||||||
@ -301,10 +301,8 @@ let list_operations node {Services.Operations.monitor; contents} =
|
|||||||
Lwt_list.map_p
|
Lwt_list.map_p
|
||||||
(fun hash ->
|
(fun hash ->
|
||||||
if include_ops then
|
if include_ops then
|
||||||
Node.RPC.operation_content node hash >>= function
|
Node.RPC.operation_content node hash >>= fun op ->
|
||||||
| None | Some { Time.data = Error _ } -> Lwt.return (hash, None)
|
Lwt.return (hash, op)
|
||||||
| Some { Time.data = Ok bytes }->
|
|
||||||
Lwt.return (hash, Some bytes)
|
|
||||||
else
|
else
|
||||||
Lwt.return (hash, None))
|
Lwt.return (hash, None))
|
||||||
operations >>= fun operations ->
|
operations >>= fun operations ->
|
||||||
@ -339,9 +337,8 @@ let list_protocols node {Services.Protocols.monitor; contents} =
|
|||||||
(fun hash ->
|
(fun hash ->
|
||||||
if include_contents then
|
if include_contents then
|
||||||
Node.RPC.protocol_content node hash >>= function
|
Node.RPC.protocol_content node hash >>= function
|
||||||
| None | Some { Time.data = Error _ } -> Lwt.return (hash, None)
|
| Error _ -> Lwt.return (hash, None)
|
||||||
| Some { Time.data = Ok bytes }->
|
| Ok bytes -> Lwt.return (hash, Some bytes)
|
||||||
Lwt.return (hash, Some bytes)
|
|
||||||
else
|
else
|
||||||
Lwt.return (hash, None))
|
Lwt.return (hash, None))
|
||||||
protocols >>= fun protocols ->
|
protocols >>= fun protocols ->
|
||||||
@ -365,8 +362,8 @@ let list_protocols node {Services.Protocols.monitor; contents} =
|
|||||||
|
|
||||||
let get_protocols node hash () =
|
let get_protocols node hash () =
|
||||||
Node.RPC.protocol_content node hash >>= function
|
Node.RPC.protocol_content node hash >>= function
|
||||||
| Some bytes -> RPC.Answer.return bytes
|
| Ok bytes -> RPC.Answer.return bytes
|
||||||
| None -> raise Not_found
|
| Error _ -> raise Not_found
|
||||||
|
|
||||||
let build_rpc_directory node =
|
let build_rpc_directory node =
|
||||||
let dir = RPC.empty in
|
let dir = RPC.empty in
|
||||||
@ -398,7 +395,7 @@ let build_rpc_directory node =
|
|||||||
let net_id = Utils.unopt ~default:bi.net net_id in
|
let net_id = Utils.unopt ~default:bi.net net_id in
|
||||||
let predecessor = Utils.unopt ~default:bi.hash pred in
|
let predecessor = Utils.unopt ~default:bi.hash pred in
|
||||||
let res =
|
let res =
|
||||||
Store.Block.to_bytes {
|
Data_encoding.Binary.to_bytes Store.Block_header.encoding {
|
||||||
shell = { net_id ; predecessor ; timestamp ; fitness ; operations } ;
|
shell = { net_id ; predecessor ; timestamp ; fitness ; operations } ;
|
||||||
proto = header ;
|
proto = header ;
|
||||||
} in
|
} in
|
||||||
@ -411,8 +408,8 @@ let build_rpc_directory node =
|
|||||||
RPC.register0 dir Services.validate_block implementation in
|
RPC.register0 dir Services.validate_block implementation in
|
||||||
let dir =
|
let dir =
|
||||||
let implementation (block, blocking, force) =
|
let implementation (block, blocking, force) =
|
||||||
Node.RPC.inject_block node ?force block >>= fun (hash, wait) ->
|
|
||||||
begin
|
begin
|
||||||
|
Node.RPC.inject_block node ?force block >>=? fun (hash, wait) ->
|
||||||
(if blocking then wait else return ()) >>=? fun () -> return hash
|
(if blocking then wait else return ()) >>=? fun () -> return hash
|
||||||
end >>= RPC.Answer.return in
|
end >>= RPC.Answer.return in
|
||||||
RPC.register0 dir Services.inject_block implementation in
|
RPC.register0 dir Services.inject_block implementation in
|
||||||
|
@ -54,10 +54,10 @@ module Blocks = struct
|
|||||||
| `Hash of Block_hash.t
|
| `Hash of Block_hash.t
|
||||||
]
|
]
|
||||||
|
|
||||||
type net = Store.net_id = Net of Block_hash.t
|
type net = State.Net_id.t = Id of Block_hash.t
|
||||||
|
|
||||||
let net_encoding =
|
let net_encoding =
|
||||||
conv (fun (Net id) -> id) (fun id -> Net id) Block_hash.encoding
|
conv (fun (Id id) -> id) (fun id -> Id id) Block_hash.encoding
|
||||||
|
|
||||||
type block_info = {
|
type block_info = {
|
||||||
hash: Block_hash.t ;
|
hash: Block_hash.t ;
|
||||||
@ -254,22 +254,22 @@ module Blocks = struct
|
|||||||
(fun ({ Updater.applied; branch_delayed ; branch_refused },
|
(fun ({ Updater.applied; branch_delayed ; branch_refused },
|
||||||
unprocessed) ->
|
unprocessed) ->
|
||||||
(applied,
|
(applied,
|
||||||
Operation_hash_map.bindings branch_delayed,
|
Operation_hash.Map.bindings branch_delayed,
|
||||||
Operation_hash_map.bindings branch_refused,
|
Operation_hash.Map.bindings branch_refused,
|
||||||
Operation_hash_set.elements unprocessed))
|
Operation_hash.Set.elements unprocessed))
|
||||||
(fun (applied, branch_delayed, branch_refused, unprocessed) ->
|
(fun (applied, branch_delayed, branch_refused, unprocessed) ->
|
||||||
({ Updater.applied ; refused = Operation_hash_map.empty ;
|
({ Updater.applied ; refused = Operation_hash.Map.empty ;
|
||||||
branch_refused =
|
branch_refused =
|
||||||
List.fold_right
|
List.fold_right
|
||||||
(fun (k, o) -> Operation_hash_map.add k o)
|
(fun (k, o) -> Operation_hash.Map.add k o)
|
||||||
branch_refused Operation_hash_map.empty ;
|
branch_refused Operation_hash.Map.empty ;
|
||||||
branch_delayed =
|
branch_delayed =
|
||||||
List.fold_right
|
List.fold_right
|
||||||
(fun (k, o) -> Operation_hash_map.add k o)
|
(fun (k, o) -> Operation_hash.Map.add k o)
|
||||||
branch_delayed Operation_hash_map.empty ;
|
branch_delayed Operation_hash.Map.empty ;
|
||||||
},
|
},
|
||||||
List.fold_right Operation_hash_set.add
|
List.fold_right Operation_hash.Set.add
|
||||||
unprocessed Operation_hash_set.empty))
|
unprocessed Operation_hash.Set.empty))
|
||||||
(obj4
|
(obj4
|
||||||
(req "applied" (list Operation_hash.encoding))
|
(req "applied" (list Operation_hash.encoding))
|
||||||
(req "branch_delayed"
|
(req "branch_delayed"
|
||||||
@ -400,9 +400,7 @@ module Operations = struct
|
|||||||
~output:
|
~output:
|
||||||
(obj1 (req "data"
|
(obj1 (req "data"
|
||||||
(describe ~title: "Tezos signed operation (hex encoded)"
|
(describe ~title: "Tezos signed operation (hex encoded)"
|
||||||
(Time.timed_encoding @@
|
(Updater.raw_operation_encoding))))
|
||||||
Error.wrap @@
|
|
||||||
Updater.raw_operation_encoding))))
|
|
||||||
RPC.Path.(root / "operations" /: operations_arg)
|
RPC.Path.(root / "operations" /: operations_arg)
|
||||||
|
|
||||||
type list_param = {
|
type list_param = {
|
||||||
@ -451,9 +449,7 @@ module Protocols = struct
|
|||||||
~output:
|
~output:
|
||||||
(obj1 (req "data"
|
(obj1 (req "data"
|
||||||
(describe ~title: "Tezos protocol"
|
(describe ~title: "Tezos protocol"
|
||||||
(Time.timed_encoding @@
|
(Store.Protocol.encoding))))
|
||||||
Error.wrap @@
|
|
||||||
Store.protocol_encoding))))
|
|
||||||
RPC.Path.(root / "protocols" /: protocols_arg)
|
RPC.Path.(root / "protocols" /: protocols_arg)
|
||||||
|
|
||||||
type list_param = {
|
type list_param = {
|
||||||
@ -479,7 +475,7 @@ module Protocols = struct
|
|||||||
(obj2
|
(obj2
|
||||||
(req "hash" Protocol_hash.encoding)
|
(req "hash" Protocol_hash.encoding)
|
||||||
(opt "contents"
|
(opt "contents"
|
||||||
(dynamic_size Store.protocol_encoding)))
|
(dynamic_size Store.Protocol.encoding)))
|
||||||
)))
|
)))
|
||||||
RPC.Path.(root / "protocols")
|
RPC.Path.(root / "protocols")
|
||||||
end
|
end
|
||||||
@ -616,7 +612,7 @@ let forge_block =
|
|||||||
~description: "Forge a block header"
|
~description: "Forge a block header"
|
||||||
~input:
|
~input:
|
||||||
(obj6
|
(obj6
|
||||||
(opt "net_id" Updater.net_id_encoding)
|
(opt "net_id" Updater.Net_id.encoding)
|
||||||
(opt "predecessor" Block_hash.encoding)
|
(opt "predecessor" Block_hash.encoding)
|
||||||
(opt "timestamp" Time.encoding)
|
(opt "timestamp" Time.encoding)
|
||||||
(req "fitness" Fitness.encoding)
|
(req "fitness" Fitness.encoding)
|
||||||
|
@@ -24,7 +24,7 @@ module Blocks : sig
  val blocks_arg : block RPC.Arg.arg

  val parse_block: string -> (block, string) result
- type net = Store.net_id = Net of Block_hash.t
+ type net = State.Net_id.t = Id of Block_hash.t

  type block_info = {
    hash: Block_hash.t ;
@@ -60,7 +60,7 @@ module Blocks : sig
    (unit, unit * block, unit, (net * Time.t) option) RPC.service
  val pending_operations:
    (unit, unit * block, unit,
-     error Updater.preapply_result * Hash.Operation_hash_set.t) RPC.service
+     error Updater.preapply_result * Hash.Operation_hash.Set.t) RPC.service

  type list_param = {
    operations: bool option ;
@@ -95,28 +95,27 @@ end

module Operations : sig
  val bytes:
-   (unit, unit * Operation_hash.t, unit,
-    Store.operation tzresult Time.timed_data) RPC.service
+   (unit, unit * Operation_hash.t, unit, State.Operation.t) RPC.service
  type list_param = {
    contents: bool option ;
    monitor: bool option ;
  }
  val list:
    (unit, unit,
-    list_param, (Operation_hash.t * Store.operation option) list) RPC.service
+    list_param, (Operation_hash.t * Store.Operation.t option) list) RPC.service
end

module Protocols : sig
  val bytes:
-   (unit, unit * Protocol_hash.t, unit,
-    Store.protocol tzresult Time.timed_data) RPC.service
+   (unit, unit * Protocol_hash.t, unit, Tezos_compiler.Protocol.t) RPC.service
  type list_param = {
    contents: bool option ;
    monitor: bool option ;
  }
  val list:
    (unit, unit,
-    list_param, (Protocol_hash.t * Store.protocol option) list) RPC.service
+    list_param,
+    (Protocol_hash.t * Tezos_compiler.Protocol.t option) list) RPC.service
end

module Network : sig
@@ -161,7 +160,7 @@ end

val forge_block:
  (unit, unit,
-  Updater.net_id option * Block_hash.t option * Time.t option *
+  Updater.Net_id.t option * Block_hash.t option * Time.t option *
   Fitness.fitness * Operation_hash.t list * MBytes.t,
   MBytes.t) RPC.service

@@ -179,7 +178,8 @@ val inject_operation:

val inject_protocol:
  (unit, unit,
-  (Store.protocol * bool * bool option), Protocol_hash.t tzresult) RPC.service
+  (Tezos_compiler.Protocol.t * bool * bool option),
+   Protocol_hash.t tzresult) RPC.service

val complete: (unit, unit * string, unit, string list) RPC.service

@@ -7,19 +7,19 @@
(*                                                                        *)
(**************************************************************************)

open Logging.Node.Prevalidator

let preapply
-    st ctxt (module Proto : Updater.REGISTRED_PROTOCOL) block timestamp sort ops =
+    net_db ctxt (module Proto : Updater.REGISTRED_PROTOCOL)
+    block timestamp sort ops =
  lwt_debug "-> prevalidate (%d)" (List.length ops) >>= fun () ->
  (* The operations list length is bounded by the size of the mempool,
     where eventually an operation should not stay more than one hours. *)
  Lwt_list.map_p
    (fun h ->
-       State.Operation.read st h >>= function
-       | None | Some { data = Error _ } ->
-           Lwt.return_none
-       | Some { data = Ok op } ->
+       Distributed_db.Operation.read net_db h >>= function
+       | None -> Lwt.return_none
+       | Some op ->
           match Proto.parse_operation h op with
           | Error _ ->
               (* the operation will never be validated in the
@@ -32,50 +32,76 @@ let preapply
  | Ok (ctxt, r) ->
      lwt_debug "<- prevalidate (%d/%d/%d/%d)"
        (List.length r.Updater.applied)
-        (Operation_hash_map.cardinal r.Updater.refused)
-        (Operation_hash_map.cardinal r.Updater.branch_refused)
-        (Operation_hash_map.cardinal r.Updater.branch_delayed) >>= fun () ->
+        (Operation_hash.Map.cardinal r.Updater.refused)
+        (Operation_hash.Map.cardinal r.Updater.branch_refused)
+        (Operation_hash.Map.cardinal r.Updater.branch_delayed) >>= fun () ->
      Lwt.return (Ok (ctxt, r))
  | Error errors ->
      (* FIXME report internal error *)
      lwt_debug "<- prevalidate (internal error)" >>= fun () ->
      Lwt.return (Error errors)

+let list_pendings net_db ~from_block ~to_block old_mempool =
+  let rec pop_blocks ancestor hash mempool =
+    if Block_hash.equal hash ancestor then
+      Lwt.return mempool
+    else
+      Distributed_db.Block_header.read_exn net_db hash >>= fun { shell } ->
+      let mempool =
+        List.fold_left
+          (fun mempool h -> Operation_hash.Set.add h mempool)
+          mempool shell.operations in
+      pop_blocks ancestor shell.predecessor mempool
+  in
+  let push_block mempool (_hash, shell) =
+    List.fold_left
+      (fun mempool h -> Operation_hash.Set.remove h mempool)
+      mempool shell.Store.Block_header.operations
+  in
+  let net_state = Distributed_db.state net_db in
+  State.Valid_block.Current.new_blocks
+    net_state ~from_block ~to_block >>= fun (ancestor, path) ->
+  pop_blocks ancestor from_block.hash old_mempool >>= fun mempool ->
+  let new_mempool = List.fold_left push_block mempool path in
+  Lwt.return new_mempool

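For reference, here is a standalone, stdlib-only sketch of the bookkeeping that the new list_pendings performs when the head moves. Everything below is illustrative and not part of the patch: the toy block record, the string "hashes" and the explicit ancestor/path arguments stand in for the real Block_hash / Operation_hash.Set / Lwt / Distributed_db types.

(* Standalone sketch (OCaml stdlib only) of the bookkeeping performed by
   list_pendings: operations of blocks popped off the old branch are put back
   into the pending set, operations included along the new branch are removed. *)
module Ops = Set.Make (String)

type block = { hash : string ; predecessor : string ; operations : string list }

(* Lookup by hash, mimicking Distributed_db.Block_header.read_exn. *)
let find_block chain hash = List.find (fun b -> b.hash = hash) chain

let list_pendings chain ~ancestor ~old_head ~path old_mempool =
  (* Walk back from the old head to the common ancestor, re-adding the
     operations of every popped block. *)
  let rec pop_blocks hash mempool =
    if hash = ancestor then mempool
    else
      let b = find_block chain hash in
      let mempool =
        List.fold_left (fun m op -> Ops.add op m) mempool b.operations in
      pop_blocks b.predecessor mempool in
  (* Drop the operations already included in a block of the new branch. *)
  let push_block mempool b =
    List.fold_left (fun m op -> Ops.remove op m) mempool b.operations in
  let mempool = pop_blocks old_head old_mempool in
  List.fold_left push_block mempool path
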
(** Worker *)

exception Invalid_operation of Operation_hash.t

type t = {
-  net: State.Net.t ;
+  net_db: Distributed_db.net ;
-  flush: unit -> unit;
+  flush: State.Valid_block.t -> unit;
-  register_operation: Operation_hash.t -> unit ;
+  notify_operation: P2p.Peer_id.t -> Operation_hash.t -> unit ;
   prevalidate_operations:
-    bool -> Store.operation list ->
+    bool -> Store.Operation.t list ->
     (Operation_hash.t list * error Updater.preapply_result) tzresult Lwt.t ;
-  operations: unit -> error Updater.preapply_result * Operation_hash_set.t ;
+  operations: unit -> error Updater.preapply_result * Operation_hash.Set.t ;
+  pending: ?block:State.Valid_block.t -> unit -> Operation_hash.Set.t Lwt.t ;
   timestamp: unit -> Time.t ;
   context: unit -> Context.t ;
   protocol: unit -> (module Updater.REGISTRED_PROTOCOL) ;
   shutdown: unit -> unit Lwt.t ;
}

let merge _key a b =
  match a, b with
  | None, None -> None
  | Some x, None -> Some x
  | _, Some y -> Some y

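This three-case merge is used with Operation_hash.Map.merge to combine the accumulated result with a fresh preapply result. A small stdlib-only illustration of the policy (string keys stand in for operation hashes; not part of the patch): when an operation appears on both sides the fresh entry wins, otherwise whichever side is present is kept.

module M = Map.Make (String)

let merge _key a b =
  match a, b with
  | None, None -> None
  | Some x, None -> Some x
  | _, Some y -> Some y

let () =
  let older = M.add "op1" "stale error" (M.add "op2" "kept" M.empty) in
  let fresh = M.add "op1" "new error" M.empty in
  let merged = M.merge merge older fresh in
  (* The fresh binding for "op1" wins, the untouched "op2" binding is kept. *)
  assert (M.find "op1" merged = "new error") ;
  assert (M.find "op2" merged = "kept")
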
-let create p2p net =
+let create net_db =

-  let st = State.Net.state net in
+  let net_state = Distributed_db.state net_db in

  let cancelation, cancel, _on_cancel = Lwt_utils.canceler () in
  let push_to_worker, worker_waiter = Lwt_utils.queue () in

-  State.Net.Blockchain.head net >>= fun head ->
-  State.Net.Blockchain.protocol net >>= fun protocol ->
-  State.Net.Mempool.get net >>= fun mempool ->
+  State.Valid_block.Current.head net_state >>= fun head ->
+  State.Valid_block.Current.protocol net_state >>= fun protocol ->
+  State.Operation.list_pending net_state >>= fun initial_mempool ->
  let timestamp = ref (Time.now ()) in
  begin
    let (module Proto) = protocol in
@@ -84,10 +110,10 @@ let create p2p net =
    | Ok (ctxt, _) -> ref ctxt
  end >>= fun context ->
  let protocol = ref protocol in
-  let head = ref head.hash in
+  let head = ref head in
  let operations = ref Updater.empty_result in
  let running_validation = ref Lwt.return_unit in
-  let unprocessed = ref mempool in
+  let unprocessed = ref initial_mempool in
  let broadcast_unprocessed = ref false in

  let set_context ctxt =
@@ -95,71 +121,55 @@ let create p2p net =
    Lwt.return_unit in

  let broadcast_operation ops =
-    Tezos_p2p.broadcast p2p (Operation_inventory (State.Net.id net, ops)) in
+    Distributed_db.broadcast_head net_db !head.hash ops in

  let handle_unprocessed () =
-    if Operation_hash_set.is_empty !unprocessed then
+    if Operation_hash.Set.is_empty !unprocessed then
      Lwt.return ()
    else
      (* We assume that `!unprocessed` does not contain any operations
         from `!operations`. *)
      let ops = !unprocessed in
      let broadcast = !broadcast_unprocessed in
-      unprocessed := Operation_hash_set.empty ;
+      unprocessed := Operation_hash.Set.empty ;
      broadcast_unprocessed := false ;
      running_validation := begin
        begin
          preapply
-            st !context !protocol !head !timestamp true
-            (Operation_hash_set.elements ops) >>= function
+            net_db !context !protocol !head.hash !timestamp true
+            (Operation_hash.Set.elements ops) >>= function
          | Ok (ctxt, r) -> Lwt.return (ctxt, r)
          | Error err ->
              let r =
                { Updater.empty_result with
                  branch_delayed =
-                    Operation_hash_set.fold
-                      (fun op m -> Operation_hash_map.add op err m)
-                      ops Operation_hash_map.empty ; } in
+                    Operation_hash.Set.fold
+                      (fun op m -> Operation_hash.Map.add op err m)
+                      ops Operation_hash.Map.empty ; } in
              Lwt.return (!context, r)
        end >>= fun (ctxt, r) ->
        let filter_out s m =
-          List.fold_right Operation_hash_map.remove s m in
+          List.fold_right Operation_hash.Map.remove s m in
        operations := {
          Updater.applied = List.rev_append r.applied !operations.applied ;
-          refused = Operation_hash_map.empty ;
+          refused = Operation_hash.Map.empty ;
          branch_refused =
-            Operation_hash_map.merge merge
+            Operation_hash.Map.merge merge
              (* filter_out should not be required here, TODO warn ? *)
              (filter_out r.applied !operations.branch_refused)
              r.branch_refused ;
          branch_delayed =
-            Operation_hash_map.merge merge
+            Operation_hash.Map.merge merge
              (filter_out r.applied !operations.branch_delayed)
              r.branch_delayed ;
        } ;
-        (* Update the Mempool *)
-        Lwt_list.iter_s
-          (fun op ->
-             State.Net.Mempool.add net op >>= fun _ ->
-             Lwt.return_unit)
-          r.Updater.applied >>= fun () ->
        if broadcast then broadcast_operation r.Updater.applied ;
        Lwt_list.iter_s
-          (fun (op, _exns) ->
-             State.Net.Mempool.add net op >>= fun _ ->
-             Lwt.return_unit)
-          (Operation_hash_map.bindings r.Updater.branch_delayed) >>= fun () ->
-        Lwt_list.iter_s
-          (fun (op, _exns) ->
-             State.Net.Mempool.add net op >>= fun _ ->
+          (fun (_op, _exns) ->
+             (* FIXME *)
+             (* Distributed_db.Operation.mark_invalid net_db op exns >>= fun _ -> *)
             Lwt.return_unit)
-          (Operation_hash_map.bindings r.Updater.branch_refused) >>= fun () ->
-        Lwt_list.iter_s
-          (fun (op, exns) ->
-             State.Net.Mempool.remove net op >>= fun _ ->
-             State.Operation.mark_invalid st op exns >>= fun _ ->
-             Lwt.return_unit)
-          (Operation_hash_map.bindings r.Updater.refused) >>= fun () ->
+          (Operation_hash.Map.bindings r.Updater.refused) >>= fun () ->
        (* TODO. Keep a bounded set of 'refused' operations. *)
        (* TODO. Log the error in some statistics associated to
           the peers that informed us of the operations. And
@@ -194,7 +204,7 @@ let create p2p net =
        let (module Proto) = !protocol in
        let result =
          map_s (fun (h, b) ->
-              State.Operation.known st h >>= function
+              Distributed_db.Operation.known net_db h >>= function
            | true ->
                failwith
                  "Previously injected operation %a"
@@ -203,16 +213,14 @@ let create p2p net =
                Lwt.return
                  (Proto.parse_operation h b
                   |> record_trace_exn (Invalid_operation h)))
-            (Operation_hash_map.bindings ops) >>=? fun parsed_ops ->
+            (Operation_hash.Map.bindings ops) >>=? fun parsed_ops ->
        Proto.preapply
-          !context !head (Time.now ())
+          !context !head.hash (Time.now ())
          true parsed_ops >>=? fun (ctxt, res) ->
        let register h =
-          let b =
-            Store.Operation.to_bytes @@
-            Operation_hash_map.find h ops in
-          State.Operation.(store st b) >>= fun _ ->
-          State.Net.Mempool.add net h >>= fun _ ->
+          let op = Operation_hash.Map.find h ops in
+          Distributed_db.Operation.inject
+            net_db h op >>= fun _ ->
          Lwt.return_unit in
        Lwt_list.iter_s
          (fun h ->
@@ -227,19 +235,19 @@ let create p2p net =
        if force then
          Lwt_list.iter_p
            (fun (h, _exns) -> register h)
-            (Operation_hash_map.bindings
+            (Operation_hash.Map.bindings
              res.Updater.branch_delayed) >>= fun () ->
          Lwt_list.iter_p
            (fun (h, _exns) -> register h)
-            (Operation_hash_map.bindings
+            (Operation_hash.Map.bindings
              res.Updater.branch_refused) >>= fun () ->
          operations :=
            { !operations with
              branch_delayed =
-                Operation_hash_map.merge merge
+                Operation_hash.Map.merge merge
                  !operations.branch_delayed res.branch_delayed ;
              branch_refused =
-                Operation_hash_map.merge merge
+                Operation_hash.Map.merge merge
                  !operations.branch_refused res.branch_refused ;
            } ;
          Lwt.return_unit
@@ -256,22 +264,27 @@ let create p2p net =
      | `Register op ->
          lwt_debug "register %a" Operation_hash.pp_short op >>= fun () ->
          broadcast_unprocessed := true ;
-          unprocessed := Operation_hash_set.singleton op ;
+          unprocessed := Operation_hash.Set.singleton op ;
          Lwt.return_unit
-      | `Flush ->
-          State.Net.Blockchain.head net >>= fun new_head ->
-          State.Net.Blockchain.protocol net >>= fun new_protocol ->
-          State.Net.Mempool.get net >>= fun new_mempool ->
+      | `Flush (new_head : State.Valid_block.t) ->
+          let new_protocol =
+            match new_head.protocol with
+            | None ->
+                assert false (* FIXME, this should not happen! *)
+            | Some protocol -> protocol in
+          list_pendings
+            net_db ~from_block:!head ~to_block:new_head
+            (Updater.operations !operations) >>= fun new_mempool ->
          lwt_debug "flush %a (mempool: %d)"
            Block_hash.pp_short new_head.hash
-            (Operation_hash_set.cardinal new_mempool) >>= fun () ->
+            (Operation_hash.Set.cardinal new_mempool) >>= fun () ->
          (* Reset the pre-validation context *)
-          head := new_head.hash ;
+          head := new_head ;
          protocol := new_protocol ;
-          operations := Updater.empty_result;
+          operations := Updater.empty_result ;
          broadcast_unprocessed := false ;
-          unprocessed := new_mempool;
+          unprocessed := new_mempool ;
-          timestamp := Time.now ();
+          timestamp := Time.now () ;
          (* Tag the context as a prevalidation context. *)
          let (module Proto) = new_protocol in
          Proto.preapply new_head.context
@@ -283,19 +296,24 @@ let create p2p net =
    in
    Lwt_utils.worker "prevalidator" ~run:worker_loop ~cancel in

-  let flush () =
-    push_to_worker `Flush;
+  let flush head =
+    push_to_worker (`Flush head) ;
    if not (Lwt.is_sleeping !running_validation) then
      Lwt.cancel !running_validation
  in
-  let register_operation op = push_to_worker (`Register op) in
+  let notify_operation gid op =
+    Lwt.async begin fun () ->
+      Distributed_db.Operation.fetch net_db ~peer:gid op >>= fun _ ->
+      push_to_worker (`Register op) ;
+      Lwt.return_unit
+    end in
  let prevalidate_operations force raw_ops =
    let ops = List.map Store.Operation.hash raw_ops in
    let ops_map =
      List.fold_left
        (fun map op ->
-           Operation_hash_map.add (Store.Operation.hash op) op map)
-        Operation_hash_map.empty raw_ops in
+           Operation_hash.Map.add (Store.Operation.hash op) op map)
+        Operation_hash.Map.empty raw_ops in
    let wait, waker = Lwt.wait () in
    push_to_worker (`Prevalidate (ops_map, waker, force));
    wait >>=? fun result ->
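The worker created here follows a common Lwt pattern: callers push events (register, flush, prevalidate) onto a queue without blocking and a single thread drains it. A minimal, self-contained sketch of that pattern using only the public lwt / lwt.unix API; Lwt_mvar and the string payloads below are stand-ins for the internal Lwt_utils.queue helper and the real operation/block values (illustrative only).

open Lwt.Infix

type event = Register of string | Flush of string

let mailbox : event Lwt_mvar.t = Lwt_mvar.create_empty ()

(* Fire-and-forget push, in the spirit of [push_to_worker]. *)
let push ev = Lwt.async (fun () -> Lwt_mvar.put mailbox ev)

let rec worker_loop () =
  Lwt_mvar.take mailbox >>= function
  | Register op -> Lwt_io.printlf "register %s" op >>= worker_loop
  | Flush head -> Lwt_io.printlf "flush on new head %s" head >>= worker_loop

let () =
  push (Register "op1") ;
  push (Flush "block2") ;
  (* Run the worker briefly; a real worker runs until cancellation. *)
  Lwt_main.run (Lwt.pick [ worker_loop () ; Lwt_unix.sleep 0.1 ])
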
@@ -307,54 +325,62 @@ let create p2p net =
    cancel () >>= fun () ->
    prevalidation_worker in

+  let pending ?block () =
+    let ops = Updater.operations !operations in
+    match block with
+    | None -> Lwt.return ops
+    | Some to_block ->
+        list_pendings net_db ~from_block:!head ~to_block ops
+  in
  Lwt.return {
-    net ;
+    net_db ;
    flush ;
-    register_operation ;
+    notify_operation ;
    prevalidate_operations ;
    operations =
      (fun () ->
         { !operations with applied = List.rev !operations.applied },
         !unprocessed) ;
+    pending ;
    timestamp = (fun () -> !timestamp) ;
    context = (fun () -> !context) ;
    protocol = (fun () -> !protocol) ;
    shutdown ;
  }

-let flush pv = pv.flush ()
+let flush pv head = pv.flush head
-let register_operation pv = pv.register_operation
+let notify_operation pv = pv.notify_operation
let prevalidate_operations pv = pv.prevalidate_operations
let operations pv = pv.operations ()
+let pending ?block pv = pv.pending ?block ()
let timestamp pv = pv.timestamp ()
let context pv = pv.context ()
let protocol pv = pv.protocol ()
let shutdown pv = pv.shutdown ()

-let inject_operation pv ?(force = false) (op: Store.operation) =
+let inject_operation pv ?(force = false) (op: Store.Operation.t) =
-  let State.Net net_id = op.shell.net_id
-  and State.Net net_id' = State.Net.id pv.net in
+  let net_id = State.Net.id (Distributed_db.state pv.net_db) in
  let wrap_error h map =
    begin
-      try return (Operation_hash_map.find h map)
+      try return (Operation_hash.Map.find h map)
      with Not_found ->
        failwith "unexpected protocol result"
    end >>=? fun errors ->
    Lwt.return (Error errors) in
-  fail_unless (Block_hash.equal net_id net_id')
+  fail_unless (Store.Net_id.equal net_id op.shell.net_id)
    (Unclassified
       "Prevalidator.inject_operation: invalid network") >>=? fun () ->
  pv.prevalidate_operations force [op] >>=? function
  | ([h], { Updater.applied = [h'] }) when Operation_hash.equal h h' ->
      return ()
  | ([h], { Updater.refused })
-    when Operation_hash_map.cardinal refused = 1 ->
+    when Operation_hash.Map.cardinal refused = 1 ->
      wrap_error h refused
  | ([h], { Updater.branch_refused })
-    when Operation_hash_map.cardinal branch_refused = 1 && not force ->
+    when Operation_hash.Map.cardinal branch_refused = 1 && not force ->
      wrap_error h branch_refused
  | ([h], { Updater.branch_delayed })
-    when Operation_hash_map.cardinal branch_delayed = 1 && not force ->
+    when Operation_hash.Map.cardinal branch_delayed = 1 && not force ->
      wrap_error h branch_delayed
  | _ ->
      if force then
@@ -29,28 +29,27 @@
type t

(** Creation and destruction of a "prevalidation" worker. *)
-val create: Tezos_p2p.net -> State.Net.t -> t Lwt.t
+val create: Distributed_db.net -> t Lwt.t
val shutdown: t -> unit Lwt.t

-(** Notify the prevalidator of a new operation. This is the
-    entry-point used by the P2P layer. The operation content has been
-    previously stored on disk. *)
-val register_operation: t -> Operation_hash.t -> unit
+val notify_operation: t -> P2p.Peer_id.t -> Operation_hash.t -> unit

(** Conditionnaly inject a new operation in the node: the operation will
    be ignored when it is (strongly) refused This is the
    entry-point used by the P2P layer. The operation content has been
    previously stored on disk. *)
val inject_operation:
-  t -> ?force:bool -> Store.operation -> unit tzresult Lwt.t
+  t -> ?force:bool -> State.Operation.t -> unit tzresult Lwt.t

-val flush: t -> unit
+val flush: t -> State.Valid_block.t -> unit
val timestamp: t -> Time.t
-val operations: t -> error Updater.preapply_result * Operation_hash_set.t
+val operations: t -> error Updater.preapply_result * Operation_hash.Set.t
val context: t -> Context.t
val protocol: t -> (module Updater.REGISTRED_PROTOCOL)

+val pending: ?block:State.Valid_block.t -> t -> Operation_hash.Set.t Lwt.t

val preapply:
-  State.state -> Context.t -> (module Updater.REGISTRED_PROTOCOL) ->
+  Distributed_db.net -> Context.t -> (module Updater.REGISTRED_PROTOCOL) ->
  Block_hash.t -> Time.t -> bool -> Operation_hash.t list ->
  (Context.t * error Updater.preapply_result) tzresult Lwt.t
File diff suppressed because it is too large.
@@ -11,228 +11,209 @@

    It encapsulates access to:

-    - the (distributed) database of raw blocks and operations;
    - the index of validation contexts; and
    - the persistent state of the node:
-      - active "networks";
      - the blockchain and its alternate heads of a "network";
      - the pool of pending operations of a "network".

*)
type t
-type state = t
+type global_state = t

-(** A "network" identifier. Here, a "network" denotes an independant
-    blockchain, or a "fork" of another blockchain. Such a "network"
-    is identified by the hash of its genesis block. *)
-type net_id = Store.net_id = Net of Block_hash.t
+module Net_id = Store.Net_id

-type error +=
-  | Invalid_fitness of Fitness.fitness * Fitness.fitness
-  | Unknown_protocol of Protocol_hash.t
-  | Inactive_network of Store.net_id
-  | Unknown_network of Store.net_id
-  | Cannot_parse

(** Read the internal state of the node and initialize
    the blocks/operations/contexts databases. *)

val read:
-  request_operations: (net_id -> Operation_hash.t list -> unit) ->
-  request_blocks: (net_id -> Block_hash.t list -> unit) ->
-  request_protocols: (Protocol_hash.t list -> unit) ->
+  ?patch_context:(Context.t -> Context.t Lwt.t) ->
  store_root:string ->
  context_root:string ->
-  ttl:int ->
-  ?patch_context:(Context.t -> Context.t Lwt.t) ->
  unit ->
-  state Lwt.t
+  global_state tzresult Lwt.t

-(** Store the internal state of the node on disk. *)
-val store: state -> unit Lwt.t

-(** Shutdown the various databases worker and store the
-    internal state of the node on disk. *)
-val shutdown: state -> unit Lwt.t


-(** {2 Operation database} ****************************************************)
+(** {2 Errors} **************************************************************)

-(** The local and distributed database of operations. *)
-module Operation : sig
+type error +=
+  | Invalid_fitness of Fitness.fitness * Fitness.fitness
+  | Unknown_network of Store.Net_id.t
+  | Unknown_operation of Operation_hash.t
+  | Unknown_block of Block_hash.t
+  | Unknown_protocol of Protocol_hash.t
+  | Cannot_parse

-  type key = Operation_hash.t

-  (** Raw operations in the database (partially parsed). *)
-  type shell_header = Store.shell_operation = {
-    net_id: net_id ;
-    (** The genesis of the chain this operation belongs to. *)
+(** {2 Network} ************************************************************)

+(** Data specific to a given network. *)
+module Net : sig

+  type t
+  type net = t

+  type genesis = {
+    time: Time.t ;
+    block: Block_hash.t ;
+    protocol: Protocol_hash.t ;
  }
-  type t = Store.operation = {
-    shell: shell_header ;
-    proto: MBytes.t ;
-    (** The raw part of the operation, as understood only by the protocol. *)
-  }
-  type operation = t
+  val genesis_encoding: genesis Data_encoding.t

-  (** Is an operation stored in the local database ? *)
-  val known: state -> key -> bool Lwt.t
+  (** Initialize a network for a given [genesis]. By default the network
+      never expirate and the test_protocol is the genesis protocol. *)
+  val create:
+    global_state ->
+    ?test_protocol: Protocol_hash.t ->
+    ?forked_network_ttl: int ->
+    genesis -> net Lwt.t

-  (** Read an operation in the local database. This returns [None]
-      when the operation does not exist in the local database; this returns
-      [Some (Error _)] when [mark_invalid] was used. This also returns
-      the time when the operation was stored on the local database. *)
-  val read:
-    state -> key -> operation tzresult Time.timed_data option Lwt.t
+  (** Look up for a network by the hash of its genesis block. *)
+  val get: global_state -> Net_id.t -> net tzresult Lwt.t

-  (** Read an operation in the local database. This throws [Not_found]
-      when the operation does not exist in the local database or when
-      [mark_invalid] was used. *)
-  val read_exn:
-    state -> key -> operation Time.timed_data Lwt.t
-  exception Invalid of key * error list
+  (** Returns all the known networks. *)
+  val all: global_state -> net list Lwt.t

-  (** Read an operation in the local database (without parsing). *)
-  val raw_read: state -> key -> MBytes.t option Lwt.t
+  (** Destroy a network: this completly removes from the local storage all
+      the data associated to the network (this includes blocks and
+      operations). *)
+  val destroy: global_state -> net -> unit Lwt.t

-  (** Read an operation from the distributed database. This may block
-      while the block is fetched from the P2P network. *)
-  val fetch:
-    state -> Store.net_id -> key -> operation tzresult Time.timed_data Lwt.t
+  (** Accessors. Respectively access to;
+      - the network id (the hash of its genesis block)
+      - its optional expiration time
+      - the associated global state. *)
+  val id: net -> Net_id.t
-  (** Request operations on the P2P network without waiting for answers. *)
-  val prefetch: state -> Store.net_id -> key list -> unit
+  val genesis: net -> genesis
+  val expiration: net -> Time.t option
+  val forked_network_ttl: net -> Int64.t option

-  (** Add an operation to the local database. This returns [Ok None]
-      if the operation was already stored in the database, or returns
-      the parsed operation if not. It may also fails when the shell
-      part of the operation cannot be parsed or when the operation
-      does not belong to an active "network". For a given sequence of
-      bytes, it is guaranted that at most one call to [store] returns
-      [Some _]. *)
-  val store:
-    state -> MBytes.t -> (Operation_hash.t * operation) option tzresult Lwt.t

-  (** Mark an operation as invalid in the local database. This returns
-      [false] if then operation was previously stores in the local
-      database. The operation is not removed from the local database,
-      but its content is replaced by the an list of errors. *)
-  val mark_invalid: state -> key -> error list -> bool Lwt.t

-  (** Returns the list known-invalid operations. *)
-  val invalid: state -> Operation_hash_set.t Lwt.t

-  (** Create a stream of all the newly locally-stored operations.
-      The returned function allows to terminate the stream. *)
-  val create_watcher:
-    state -> (key * operation) Lwt_stream.t * Watcher.stopper

end

-(** {2 Block database} ********************************************************)
+(** Shared signature for the databases of block_headers,
+    operations and protocols. *)
+module type DATA_STORE = sig

-(** The local and distributed database of blocks. *)
+  type store
-module Block : sig
+  type key
+  type value

-  type shell_header = Store.shell_block = {
-    net_id: net_id ;
-    (** The genesis of the chain this block belongs to. *)
+  (** Is a value stored in the local database ? *)
+  val known: store -> key -> bool Lwt.t

+  (** Read a value in the local database. *)
+  val read: store -> key -> value tzresult Lwt.t
+  val read_opt: store -> key -> value option Lwt.t
+  val read_exn: store -> key -> value Lwt.t

+  (** Read a value in the local database (without parsing). *)
+  val read_raw: store -> key -> MBytes.t tzresult Lwt.t
+  val read_raw_opt: store -> key -> MBytes.t option Lwt.t
+  val read_raw_exn: store -> key -> MBytes.t Lwt.t

+  (** Read data discovery time (the time when `store` was called). *)
+  val read_discovery_time: store -> key -> Time.t tzresult Lwt.t
+  val read_discovery_time_opt: store -> key -> Time.t option Lwt.t
+  val read_discovery_time_exn: store -> key -> Time.t Lwt.t

+  (** Store a value in the local database (pre-parsed value). It
+      returns [false] when the value is already stored, or [true]
+      otherwise. For a given value, only one call to `store` (or an
+      equivalent call to `store_raw`) might return [true]. *)
+  val store: store -> value -> bool Lwt.t

+  (** Store a value in the local database (unparsed data). It returns
+      [Ok None] when the data is already stored, or [Ok (Some (hash,
+      value))] otherwise. For a given data, only one call to
+      `store_raw` (or an equivalent call to `store`) might return [Ok
+      (Some _)]. It may return [Error] when the shell part of the value
+      cannot be parsed. *)
+  val store_raw: store -> key -> MBytes.t -> value option tzresult Lwt.t

+  (** Remove a value from the local database. *)
+  val remove: store -> key -> bool Lwt.t

+end
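To make the shared DATA_STORE contract concrete, here is a toy in-memory flavour of a few of its operations. It is not part of the patch and is deliberately simplified: it assumes only the lwt package, a Hashtbl stands in for the on-disk store, string keys stand in for Block_hash.t / Operation_hash.t, the key is passed explicitly instead of being derived by hashing the value, and the tzresult, *_raw and discovery-time variants are omitted.

module Memory_store = struct
  type key = string
  type 'v t = (key, 'v) Hashtbl.t

  let create () : 'v t = Hashtbl.create 17

  (* Is a value stored in the local table ? *)
  let known st k = Lwt.return (Hashtbl.mem st k)

  let read_opt st k = Lwt.return (Hashtbl.find_opt st k)

  (* Returns [true] the first time a value is stored and [false] afterwards,
     mirroring the contract of [store]. *)
  let store st k v =
    if Hashtbl.mem st k then Lwt.return false
    else (Hashtbl.add st k v ; Lwt.return true)

  let remove st k =
    let existed = Hashtbl.mem st k in
    Hashtbl.remove st k ;
    Lwt.return existed
end
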

+(** {2 Block_header database} *************************************************)

+module Block_header : sig

+  type shell_header = Store.Block_header.shell_header = {
+    net_id: Net_id.t ;
    predecessor: Block_hash.t ;
-    (** The preceding block in the chain. *)
    timestamp: Time.t ;
-    (** The date at which this block has been forged. *)
    fitness: MBytes.t list ;
-    (** The announced score of the block. As a sequence of sequences
-        of unsigned bytes. Ordered by length and then by contents
-        lexicographically. *)
    operations: Operation_hash.t list ;
-    (** The raw part of the block header, as understood only by the protocol. *)
  }
-  type t = Store.block = {
+  type t = Store.Block_header.t = {
    shell: shell_header ;
    proto: MBytes.t ;
  }
-  type block = t
+  type block_header = t

-  (** Is a block stored in the local database ? *)
-  val known: state -> Block_hash.t -> bool Lwt.t
+  include DATA_STORE with type store = Net.t
+                      and type key = Block_hash.t
+                      and type value = block_header

-  (** Read a block in the local database. *)
-  val read: state -> Block_hash.t -> block Time.timed_data option Lwt.t
+  val mark_invalid: Net.t -> Block_hash.t -> error list -> bool Lwt.t

-  (** Read a block in the local database. This throws [Not_found]
-      when the block does not exist in the local database. *)
-  val read_exn: state -> Block_hash.t -> block Time.timed_data Lwt.t
+  val invalid: Net.t -> Block_hash.t -> error list option Lwt.t
+  val pending: Net.t -> Block_hash.t -> bool Lwt.t

-  (** Read the predecessor of a block in the local database. *)
-  val read_pred: state -> Block_hash.t -> Block_hash.t option Lwt.t
+  val list_pending: Net.t -> Block_hash.Set.t Lwt.t
+  val list_invalid: Net.t -> Block_hash.Set.t Lwt.t

-  (** Read a block in the local database (without parsing). *)
-  val raw_read: state -> Block_hash.t -> MBytes.t option Lwt.t
+  module Helpers : sig

-  (** Read a block from the distributed database. This may block
-      while the block is fetched from the P2P network. *)
-  val fetch: state -> Store.net_id -> Block_hash.t -> block Time.timed_data Lwt.t
+    (** If [h1] is an ancestor of [h2] in the current [state],
+        then [path state h1 h2] returns the chain of block from
+        [h1] (excluded) to [h2] (included). *)
+    val path:
+      Net.t -> Block_hash.t -> Block_hash.t ->
+      (Block_hash.t * shell_header) list tzresult Lwt.t

-  (** Request blocks on the P2P network without waiting for answers. *)
-  val prefetch: state -> Store.net_id -> Block_hash.t list -> unit
+    (** [common_ancestor state h1 h2] returns the first common ancestors
+        in the history of blocks [h1] and [h2]. *)
+    val common_ancestor:
+      Net.t -> Block_hash.t -> Block_hash.t ->
+      (Block_hash.t * shell_header) tzresult Lwt.t

-  (** Add a block to the local database. This returns [Ok None] if the
-      block was already stored in the database, or returns the
-      (partially) parsed block if not. It may also fails when the
-      shell part of the block cannot be parsed or when the block does
-      not belong to an active "network". For a given sequence of
-      bytes, it is guaranted that at most one call to [store] returns
-      [Some _]. *)
-  val store:
-    state -> MBytes.t -> (Block_hash.t * block) option tzresult Lwt.t
+    (** [block_locator state max_length h] compute the sparse block locator
+        (/à la/ Bitcoin) for the block [h]. *)
+    val block_locator:
+      Net.t -> int -> Block_hash.t -> Block_hash.t list tzresult Lwt.t

-  (** Create a stream of all the newly locally-stored blocks.
-      The returned function allows to terminate the stream. *)
-  val create_watcher:
-    state -> (Block_hash.t * block) Lwt_stream.t * Watcher.stopper
+    (** [iter_predecessors state blocks f] iter [f] on [blocks] and
+        their recursive (known) predecessors. Blocks are visited with a
+        decreasing fitness (then decreasing timestamp). If the optional
+        argument [max] is provided, the iteration is stopped after [max]
+        visited block. If [min_fitness] id provided, blocks with a
+        fitness lower than [min_fitness] are ignored. If [min_date],
+        blocks with a fitness lower than [min_date] are ignored. *)
+    val iter_predecessors:
+      Net.t ->
+      ?max:int ->
+      ?min_fitness:Fitness.fitness ->
+      ?min_date:Time.t ->
+      block_header list ->
+      f:(block_header -> unit Lwt.t) ->
+      unit tzresult Lwt.t

-  (** If [h1] is an ancestor of [h2] in the current [state],
-      then [path state h1 h2] returns the chain of block from
-      [h1] (excluded) to [h2] (included). *)
-  val path:
-    state -> Block_hash.t -> Block_hash.t -> Block_hash.t list tzresult Lwt.t
+  end

-  (** [common_ancestor state h1 h2] returns the first common ancestors
-      in the history of blocks [h1] and [h2]. *)
-  val common_ancestor:
-    state -> Block_hash.t -> Block_hash.t -> Block_hash.t tzresult Lwt.t

-  (** [block_locator state max_length h] compute the sparse block locator
-      (/à la/ Bitcoin) for the block [h]. *)
-  val block_locator:
-    state -> int -> Block_hash.t -> Block_hash.t list tzresult Lwt.t

-  (** [iter_predecessors state blocks f] iter [f] on [blocks] and
-      their recursive (known) predecessors. Blocks are visited with a
-      decreasing fitness (then decreasing timestamp). If the optional
-      argument [max] is provided, the iteration is stopped after [max]
-      visited block. If [min_fitness] id provided, blocks with a
-      fitness lower than [min_fitness] are ignored. If [min_date],
-      blocks with a fitness lower than [min_date] are ignored. *)
-  val iter_predecessors:
-    state ->
-    ?max:int ->
-    ?min_fitness:Fitness.fitness ->
-    ?min_date:Time.t ->
-    block list ->
-    f:(block -> unit Lwt.t) ->
-    unit tzresult Lwt.t

end

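block_locator computes a sparse locator in the Bitcoin style: dense near the head, exponentially sparser towards the genesis. A standalone sketch of the idea over integer block numbers follows; the predecessor function, genesis value and the exact sampling schedule are illustrative assumptions, not the real implementation.

(* Recent blocks are listed densely, then the step between sampled blocks
   doubles, and the genesis is always included last. *)
let block_locator ~predecessor ~genesis max_length head =
  let rec skip n b =
    if n = 0 || b = genesis then b else skip (n - 1) (predecessor b) in
  let rec loop acc n step block =
    if n >= max_length || block = genesis then List.rev (genesis :: acc)
    else
      (* Dense for the first ten entries, then the step doubles each time. *)
      let next_step = if n < 10 then 1 else max 2 (step * 2) in
      loop (block :: acc) (n + 1) next_step (skip next_step block) in
  loop [] 0 1 head

let () =
  let predecessor b = b - 1 in
  let locator = block_locator ~predecessor ~genesis:0 12 1000 in
  (* Yields [1000; 999; ...; 991; 990; 988; 0]: dense near the head, sparser
     towards the genesis, which is always the last entry. *)
  assert (List.hd locator = 1000) ;
  assert (List.exists (( = ) 0) locator)
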
(** {2 Valid block} ***********************************************************)
|
(** {2 Valid block} ***********************************************************)
|
||||||
|
|
||||||
(** The local database of known-valid blocks. *)
|
(** The local database of known-valid blocks. *)
|
||||||
module Valid_block : sig
|
module Valid_block : sig
|
||||||
|
|
||||||
(** A previously validated block. *)
|
(** A validated block. *)
|
||||||
type t = private {
|
type t = private {
|
||||||
net_id: net_id ;
|
net_id: Net_id.t ;
|
||||||
(** The genesis of the chain this block belongs to. *)
|
(** The genesis of the chain this block belongs to. *)
|
||||||
hash: Block_hash.t ;
|
hash: Block_hash.t ;
|
||||||
(** The block hash. *)
|
(** The block hash. *)
|
||||||
@ -256,267 +237,148 @@ module Valid_block : sig
|
|||||||
test_protocol: (module Updater.REGISTRED_PROTOCOL) option ;
|
test_protocol: (module Updater.REGISTRED_PROTOCOL) option ;
|
||||||
(** The actual implementatino of the protocol to be used for the
|
(** The actual implementatino of the protocol to be used for the
|
||||||
next test network. *)
|
next test network. *)
|
||||||
test_network: (net_id * Time.t) option ;
|
test_network: (Net_id.t * Time.t) option ;
|
||||||
(** The current test network associated to the block, and the date
|
(** The current test network associated to the block, and the date
|
||||||
of its expiration date. *)
|
of its expiration date. *)
|
||||||
context: Context.t ;
|
context: Context.t ;
|
||||||
(** The validation context that was produced by the block validation. *)
|
(** The validation context that was produced by the block validation. *)
|
||||||
successors: Block_hash_set.t ;
|
successors: Block_hash.Set.t ;
|
||||||
|
invalid_successors: Block_hash.Set.t ;
|
||||||
(** The set of valid successors (including forked networks). *)
|
(** The set of valid successors (including forked networks). *)
|
||||||
invalid_successors: Block_hash_set.t ;
|
shell_header: Block_header.shell_header;
|
||||||
(** The set of invalid successors (including forked networks). *)
|
(** The oriignal header. *)
|
||||||
}
|
}
|
||||||
type valid_block = t
|
type valid_block = t
|
||||||
|
|
||||||
(** Is the block known as a valid block in the database ? *)
|
val known: Net.t -> Block_hash.t -> bool Lwt.t
|
||||||
val valid: state -> Block_hash.t -> bool Lwt.t
|
val read: Net.t -> Block_hash.t -> valid_block tzresult Lwt.t
|
||||||
|
val read_opt: Net.t -> Block_hash.t -> valid_block option Lwt.t
|
||||||
(** Is the block known in the database (valid or invalid) ? *)
|
val read_exn: Net.t -> Block_hash.t -> valid_block Lwt.t
|
||||||
val known: state -> Block_hash.t -> bool Lwt.t
|
|
||||||
|
|
||||||
(** Read a block in the database. This returns [None] when
|
|
||||||
the block did not get trough the validation process yet. This
|
|
||||||
returns [Error] if the block is known invalid or [Ok] otherwise. *)
|
|
||||||
val read: state -> Block_hash.t -> valid_block tzresult option Lwt.t
|
|
||||||
|
|
||||||
(** Read a block in the database. This throws [Not_found] when
|
|
||||||
the block did not get trough the validation process yet. This
|
|
||||||
throws [Invalid] if the block is known invalid. *)
|
|
||||||
val read_exn: state -> Block_hash.t -> valid_block Lwt.t
|
|
||||||
exception Invalid of Block_hash.t * error list
|
|
||||||
|
|
||||||
(** Returns all the known (validated) heads of all the known block chain.
|
|
||||||
(This includes the main blockchain and the non-expired test networks. *)
|
|
||||||
val known_heads: state -> valid_block Block_hash_map.t Lwt.t
|
|
||||||
|
|
||||||
(** Returns all the known blocks that not did get through the validator yet. *)
|
|
||||||
val postponed: state -> Block_hash_set.t Lwt.t
|
|
||||||
|
|
||||||
(** Returns all the known blocks whose validation failed. *)
|
|
||||||
val invalid: state -> Block_hash_set.t Lwt.t
|
|
||||||
|
|
||||||
(** Create a stream of all the newly validated blocks.
|
|
||||||
The returned function allows to terminate the stream. *)
|
|
||||||
val create_watcher: state -> (valid_block Lwt_stream.t * Watcher.stopper) Lwt.t
|
|
||||||
|
|
||||||
(** If [h1] is an ancestor of [h2] in the current [state],
|
|
||||||
then [path state h1 h2] returns the chain of block from
|
|
||||||
[h1] (excluded) to [h2] (included). Returns [None] otherwise. *)
|
|
||||||
val path:
|
|
||||||
state -> valid_block -> valid_block -> valid_block list option Lwt.t
|
|
||||||
|
|
||||||
(** [common_ancestor state h1 h2] returns the first common ancestors
|
|
||||||
in the history of blocks [h1] and [h2]. *)
|
|
||||||
val common_ancestor:
|
|
||||||
state -> valid_block -> valid_block -> valid_block Lwt.t
|
|
||||||
|
|
||||||
(** [block_locator state max_length h] compute the sparse block locator
|
|
||||||
(/à la/ Bitcoin) for the block [h]. *)
|
|
||||||
val block_locator: state -> int -> valid_block -> Block_hash.t list Lwt.t
|
|
||||||
|
|
||||||
(** [iter_predecessors state blocks f] iter [f] on [blocks] and
|
|
||||||
their recursive predecessors. Blocks are visited with a
|
|
||||||
decreasing fitness (then decreasing timestamp). If the optional
|
|
||||||
argument [max] is provided, the iteration is stopped after [max]
|
|
||||||
visited block. If [min_fitness] id provided, blocks with a
|
|
||||||
fitness lower than [min_fitness] are ignored. If [min_date],
|
|
||||||
blocks with a fitness lower than [min_date] are ignored. *)
|
|
||||||
val iter_predecessors:
|
|
||||||
state ->
|
|
||||||
?max:int ->
|
|
||||||
?min_fitness:Fitness.fitness ->
|
|
||||||
?min_date:Time.t ->
|
|
||||||
valid_block list ->
|
|
||||||
f:(valid_block -> unit Lwt.t) ->
|
|
||||||
unit tzresult Lwt.t
|
|
||||||
|
|
||||||
(**/**)
|
|
||||||
|
|
||||||
(* Store function to be used by the validator. *)
|
|
||||||
module Store : Persist.TYPED_STORE with type key = Block_hash.t
|
|
||||||
and type value = Context.t tzresult
|
|
||||||
val get_store: state -> Store.t Persist.shared_ref
|
|
||||||
|
|
||||||
(* Private interface for testing. *)
|
|
||||||
val store: state -> Block_hash.t -> Context.t -> valid_block tzresult Lwt.t
|
|
||||||
val store_invalid: state -> Block_hash.t -> error list -> bool Lwt.t
|
|
||||||
|
|
||||||
end
|
|
||||||
|
|
||||||
(** {2 Protocol database} ****************************************************)
|
|
||||||
|
|
||||||
(** The local and distributed database of protocols. *)
|
|
||||||
module Protocol : sig
|
|
||||||
|
|
||||||
type key = Protocol_hash.t
|
|
||||||
|
|
||||||
type component = Tezos_compiler.Protocol.component = {
|
|
||||||
name : string ;
|
|
||||||
interface : string option ;
|
|
||||||
implementation : string ;
|
|
||||||
}
|
|
||||||
|
|
||||||
type t = Tezos_compiler.Protocol.t
|
|
||||||
|
|
||||||
type protocol = t
|
|
||||||
|
|
||||||
(** Is a protocol stored in the local database ? *)
|
|
||||||
val known: state -> key -> bool Lwt.t
|
|
||||||
|
|
||||||
(** Read a protocol in the local database. This returns [None]
|
|
||||||
when the protocol does not exist in the local database; this returns
|
|
||||||
[Some (Error _)] when [mark_invalid] was used. This also returns
|
|
||||||
the time when the protocol was stored on the local database. *)
|
|
||||||
val read:
|
|
||||||
state -> key -> protocol tzresult Time.timed_data option Lwt.t
|
|
||||||
|
|
||||||
(** Read a protocol in the local database. This throws [Not_found]
|
|
||||||
when the protocol does not exist in the local database or when
|
|
||||||
[mark_invalid] was used. *)
|
|
||||||
val read_exn:
|
|
||||||
state -> key -> protocol Time.timed_data Lwt.t
|
|
||||||
exception Invalid of key * error list
|
|
||||||
|
|
||||||
(** Read an operation in the local database (without parsing). *)
|
|
||||||
val raw_read: state -> key -> MBytes.t option Lwt.t
|
|
||||||
|
|
||||||
(** Read a protocol from the distributed database. This may block
|
|
||||||
while the block is fetched from the P2P network. *)
|
|
||||||
val fetch:
|
|
||||||
state -> Store.net_id -> key -> protocol tzresult Time.timed_data Lwt.t
|
|
||||||
|
|
||||||
(** Request protocols on the P2P network without waiting for answers. *)
|
|
||||||
val prefetch: state -> Store.net_id -> key list -> unit
|
|
||||||
|
|
||||||
(** Add a protocol to the local database. This returns [Ok None]
|
|
||||||
if the protocol was already stored in the database, or returns
|
|
||||||
the parsed operation if not. It may also fails when the shell
|
|
||||||
part of the operation cannot be parsed or when the operation
|
|
||||||
does not belong to an active "network". For a given sequence of
|
|
||||||
bytes, it is guaranted that at most one call to [store] returns
|
|
||||||
[Some _]. *)
|
|
||||||
val store:
|
val store:
|
||||||
state -> MBytes.t -> (Protocol_hash.t * protocol) option tzresult Lwt.t
|
Net.t -> Block_hash.t -> Context.t -> valid_block option tzresult Lwt.t
|
||||||
|
|
||||||
(** Mark a protocol as invalid in the local database. This returns
|
val watcher: Net.t -> valid_block Lwt_stream.t * Watcher.stopper
|
||||||
[false] if the protocol was previously stored in the local
|
|
||||||
database. The protocol is not removed from the local database,
|
|
||||||
but its content is replaced by a list of errors. *)
|
|
||||||
val mark_invalid: state -> key -> error list -> bool Lwt.t
|
|
||||||
|
|
||||||
(** Returns the list known-invalid procols. *)
|
(** The known valid heads of the network's blockchain. *)
|
||||||
val invalid: state -> Protocol_hash_set.t Lwt.t
|
val known_heads: Net.t -> valid_block list Lwt.t
|
||||||
|
|
||||||
(** Create a stream of all the newly locally-stored protocols.
|
val fork_testnet:
|
||||||
The returned function allows to terminate the stream. *)
|
global_state -> Net.t -> valid_block -> Time.t -> Net.t tzresult Lwt.t
|
||||||
val create_watcher:
|
|
||||||
state -> (key * protocol) Lwt_stream.t * Watcher.stopper
|
|
||||||
|
|
||||||
val keys: state -> key list Lwt.t
|
module Current : sig
|
||||||
end
|
|
||||||
|
|
||||||
(** {2 Network} ****************************************************************)
|
|
||||||
|
|
||||||
(** Data specific to a given network. *)
|
|
||||||
module Net : sig
|
|
||||||
|
|
||||||
type t
|
|
||||||
type net = t
|
|
||||||
|
|
||||||
(** Initialize a network for a given [genesis]. It may fails if the
|
|
||||||
genesis block is a known invalid block. By default the network
|
|
||||||
never expirate and the test_protocol is the genesis protocol.
|
|
||||||
When the genesis block correspond to a valid block where
|
|
||||||
the "test_network" is set to be this genesis block, the test protocol
|
|
||||||
will be promoted as validation protocol(in this forked network only). *)
|
|
||||||
val create:
|
|
||||||
state -> ?expiration:Time.t -> ?test_protocol:Protocol_hash.t ->
|
|
||||||
Store.genesis -> net tzresult Lwt.t
|
|
||||||
|
|
||||||
(** Look up for a network by the hash of its genesis block. *)
|
|
||||||
val get: state -> net_id -> net tzresult
|
|
||||||
|
|
||||||
(** Returns all the known networks. *)
|
|
||||||
val all: state -> net list
|
|
||||||
|
|
||||||
(** Destroy a network: this completly removes from the local storage all
|
|
||||||
the data associated to the network (this includes blocks and
|
|
||||||
operations). *)
|
|
||||||
val destroy: net -> unit Lwt.t
|
|
||||||
|
|
||||||
(** Accessors. Respectively access to;
|
|
||||||
- the network id (the hash of its genesis block)
|
|
||||||
- its optional expiration time
|
|
||||||
- the associated global state. *)
|
|
||||||
val id: net -> net_id
|
|
||||||
val expiration: net -> Time.t option
|
|
||||||
val state: net -> state
|
|
||||||
|
|
||||||
(** Mark a network as active or inactive. Newly discovered blocks and
|
|
||||||
operations on inactive networks are ignored. *)
|
|
||||||
val activate: net -> unit
|
|
||||||
val deactivate: net -> unit
|
|
||||||
|
|
||||||
(** Return the list of active network. *)
|
|
||||||
val active: state -> net list
|
|
||||||
|
|
||||||
(** Test whether a network is active or not. *)
|
|
||||||
val is_active: state -> net_id -> bool
|
|
||||||
|
|
||||||
(** {3 Blockchain} ************************************************************)
|
|
||||||
|
|
||||||
module Blockchain : sig
|
|
||||||
|
|
||||||
(** The genesis block of the network's blockchain. On a test network,
|
(** The genesis block of the network's blockchain. On a test network,
|
||||||
the test protocol has been promoted as "main" protocol. *)
|
the test protocol has been promoted as "main" protocol. *)
|
||||||
val genesis: net -> Valid_block.t Lwt.t
|
val genesis: Net.t -> valid_block Lwt.t
|
||||||
|
|
||||||
(** The current head of the network's blockchain. *)
|
(** The current head of the network's blockchain. *)
|
||||||
val head: net -> Valid_block.t Lwt.t
|
val head: Net.t -> valid_block Lwt.t
|
||||||
|
|
||||||
(** The current protocol of the network's blockchain. *)
|
(** The current protocol of the network's blockchain. *)
|
||||||
val protocol: net -> (module Updater.REGISTRED_PROTOCOL) Lwt.t
|
val protocol: Net.t -> (module Updater.REGISTRED_PROTOCOL) Lwt.t
|
||||||
|
|
||||||
(** Record a block as the current head of the network's blockchain. *)
|
(** Record a block as the current head of the network's blockchain. *)
|
||||||
val set_head: net -> Valid_block.t -> unit Lwt.t
|
val set_head: Net.t -> valid_block -> unit Lwt.t
|
||||||
|
|
||||||
|
val mem: Net.t -> Block_hash.t -> bool Lwt.t
|
||||||
|
|
||||||
(** Atomically change the current head of the network's blockchain.
|
(** Atomically change the current head of the network's blockchain.
|
||||||
This returns [true] whenever the change succeeded, or [false]
|
This returns [true] whenever the change succeeded, or [false]
|
||||||
when the current head is not equal to the [old] argument. *)
|
when the current head is not equal to the [old] argument. *)
|
||||||
val test_and_set_head:
|
val test_and_set_head:
|
||||||
net -> old:Valid_block.t -> Valid_block.t -> bool Lwt.t
|
Net.t -> old:valid_block -> valid_block -> bool Lwt.t
|
||||||
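A minimal sketch of the intended usage, assuming the [Valid_block.Current] path of the refactored interface; it mirrors the retry loop of the validator's [may_set_head] later in this patch:

    (* Atomically advance the head, retrying when another update wins the race. *)
    let rec try_set_head net (block : valid_block) =
      Valid_block.Current.head net >>= fun old ->
      if Fitness.compare old.fitness block.fitness >= 0 then
        Lwt.return_unit                    (* current head is already as good *)
      else
        Valid_block.Current.test_and_set_head net ~old block >>= function
        | true -> Lwt.return_unit          (* head updated *)
        | false -> try_set_head net block  (* lost the race, retry *)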
|
|
||||||
(** Test whether a block belongs to the current branch of the network's
|
|
||||||
blockchain. *)
|
|
||||||
val mem: net -> Block_hash.t -> bool Lwt.t
|
|
||||||
|
|
||||||
(** [find_new net locator max_length], where [locator] is a sparse block
|
(** [find_new net locator max_length], where [locator] is a sparse block
|
||||||
locator (/à la/ Bitcoin), returns the missing blocks when compared
|
locator (/à la/ Bitcoin), returns the missing blocks when compared
|
||||||
with the current branch of [net]. *)
|
with the current branch of [net]. *)
|
||||||
val find_new:
|
val find_new:
|
||||||
net -> Block_hash.t list -> int -> Block_hash.t list tzresult Lwt.t
|
Net.t -> Block_hash.t list -> int -> Block_hash.t list tzresult Lwt.t
|
||||||
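A hedged sketch of answering a peer's locator with [find_new] (assumed in scope; [notify_inventory] is a hypothetical callback and 200 an arbitrary bound):

    (* Reply to a "Discover_blocks" style request with the missing hashes. *)
    let answer_locator net conn locator =
      find_new net locator 200 >>=? fun missing ->
      notify_inventory conn missing ;
      return ()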
|
|
||||||
|
val new_blocks:
|
||||||
|
Net.t -> from_block:valid_block -> to_block:valid_block ->
|
||||||
|
(Block_hash.t * (Block_hash.t * Block_header.shell_header) list) Lwt.t
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
||||||
(** {3 Mempool} *************************************************************)
|
module Helpers : sig
|
||||||
|
|
||||||
(** The mempool contains the known not-trivially-invalid operations
|
(** If [h1] is an ancestor of [h2] in the current [state],
|
||||||
that are not yet included in the blockchain. *)
|
then [path state h1 h2] returns the chain of blocks from
|
||||||
module Mempool : sig
|
[h1] (excluded) to [h2] (included). Returns [None] otherwise. *)
|
||||||
|
val path:
|
||||||
|
Net.t -> valid_block -> valid_block -> valid_block list option Lwt.t
|
||||||
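A small illustrative helper built on [path], assuming it lives under [Valid_block.Helpers] as elsewhere in this patch:

    (* How many blocks separate [old_head] from [new_head] on the same branch? *)
    let distance net old_head new_head =
      Valid_block.Helpers.path net old_head new_head >>= function
      | None -> Lwt.return None                          (* not an ancestor *)
      | Some chain -> Lwt.return (Some (List.length chain))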
|
|
||||||
(** Returns the current mempool of the network. *)
|
(** [common_ancestor state h1 h2] returns the first common ancestor
|
||||||
val get: net -> Operation_hash_set.t Lwt.t
|
in the history of blocks [h1] and [h2]. *)
|
||||||
|
val common_ancestor:
|
||||||
|
Net.t -> valid_block -> valid_block -> valid_block Lwt.t
|
||||||
|
|
||||||
(** Add an operation to the mempool. *)
|
(** [block_locator state max_length h] computes the sparse block locator
|
||||||
val add: net -> Operation_hash.t -> bool Lwt.t
|
(/à la/ Bitcoin) for the block [h]. *)
|
||||||
|
val block_locator: Net.t -> int -> valid_block -> Block_hash.t list Lwt.t
|
||||||
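The same pattern appears in the validator's [current_branch] callback later in this patch; a condensed sketch:

    (* Build a sparse locator for the current head before asking peers for news. *)
    let current_locator net =
      Valid_block.Current.head net >>= fun head ->
      Valid_block.Helpers.block_locator net 50 head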
|
|
||||||
(** Remove an operation from the mempool. *)
|
(** [iter_predecessors state blocks f] iterates [f] on [blocks] and
|
||||||
val remove: net -> Operation_hash.t -> bool Lwt.t
|
their recursive predecessors. Blocks are visited with a
|
||||||
|
decreasing fitness (then decreasing timestamp). If the optional
|
||||||
(** Returns an over-approximation of the mempool for an alternative
|
argument [max] is provided, the iteration is stopped after [max]
|
||||||
head in the blockchain. *)
|
visited blocks. If [min_fitness] is provided, blocks with a
|
||||||
val for_block: net -> Valid_block.t -> Operation_hash_set.t Lwt.t
|
fitness lower than [min_fitness] are ignored. If [min_date],
|
||||||
|
blocks with a timestamp earlier than [min_date] are ignored. *)
|
||||||
|
val iter_predecessors:
|
||||||
|
Net.t ->
|
||||||
|
?max:int ->
|
||||||
|
?min_fitness:Fitness.fitness ->
|
||||||
|
?min_date:Time.t ->
|
||||||
|
valid_block list ->
|
||||||
|
f:(valid_block -> unit Lwt.t) ->
|
||||||
|
unit tzresult Lwt.t
|
||||||
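A hedged sketch combining [known_heads] with [iter_predecessors] ([lwt_log_info] is assumed in scope, as in the validator module):

    (* Visit at most 100 ancestors of the known heads, best fitness first,
       skipping blocks older than [min_date]. *)
    let log_recent_ancestors net ~min_date =
      Valid_block.known_heads net >>= fun heads ->
      Valid_block.Helpers.iter_predecessors net ~max:100 ~min_date heads
        ~f:(fun block ->
            lwt_log_info "ancestor %a" Block_hash.pp_short block.hash)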
|
|
||||||
end
|
end
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
||||||
|
(** {2 Operation database} ****************************************************)
|
||||||
|
|
||||||
|
module Operation : sig
|
||||||
|
|
||||||
|
type shell_header = Store.Operation.shell_header = {
|
||||||
|
net_id: Net_id.t ;
|
||||||
|
}
|
||||||
|
|
||||||
|
type t = Store.Operation.t = {
|
||||||
|
shell: shell_header ;
|
||||||
|
proto: MBytes.t ;
|
||||||
|
}
|
||||||
|
|
||||||
|
include DATA_STORE with type store = Net.t
|
||||||
|
and type key = Operation_hash.t
|
||||||
|
and type value = t
|
||||||
|
|
||||||
|
val mark_invalid: Net.t -> Operation_hash.t -> error list -> bool Lwt.t
|
||||||
|
|
||||||
|
val in_chain: Net.t -> Operation_hash.t -> bool Lwt.t
|
||||||
|
val pending: Net.t -> Operation_hash.t -> bool Lwt.t
|
||||||
|
val invalid: Net.t -> Operation_hash.t -> error list option Lwt.t
|
||||||
|
|
||||||
|
val list_pending: Net.t -> Operation_hash.Set.t Lwt.t
|
||||||
|
|
||||||
|
val list_invalid: Net.t -> Operation_hash.Set.t Lwt.t
|
||||||
|
|
||||||
|
end
|
||||||
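An illustrative query sequence over this store, assuming the module is reached as [State.Operation]:

    (* Classify an operation hash against the per-network store. *)
    let classify net oph =
      State.Operation.in_chain net oph >>= fun in_chain ->
      if in_chain then Lwt.return `In_chain
      else
        State.Operation.pending net oph >>= fun pending ->
        if pending then Lwt.return `Pending
        else
          State.Operation.invalid net oph >>= function
          | Some errors -> Lwt.return (`Invalid errors)
          | None -> Lwt.return `Unknown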
|
|
||||||
|
|
||||||
|
(** {2 Protocol database} ***************************************************)
|
||||||
|
|
||||||
|
module Protocol : sig
|
||||||
|
include DATA_STORE with type store = global_state
|
||||||
|
and type key = Protocol_hash.t
|
||||||
|
and type value = Tezos_compiler.Protocol.t
|
||||||
|
|
||||||
|
val list: global_state -> Protocol_hash.Set.t Lwt.t
|
||||||
|
|
||||||
|
(* val mark_invalid: Net.t -> Protocol_hash.t -> error list -> bool Lwt.t *)
|
||||||
|
(* val list_invalid: Net.t -> Protocol_hash.Set.t Lwt.t *)
|
||||||
|
|
||||||
|
end
|
||||||
|
|
||||||
|
@ -1,175 +0,0 @@
|
|||||||
|
|
||||||
open P2p
|
|
||||||
|
|
||||||
type net_id = Store.net_id
|
|
||||||
|
|
||||||
type msg =
|
|
||||||
| Discover_blocks of net_id * Block_hash.t list (* Block locator *)
|
|
||||||
| Block_inventory of net_id * Block_hash.t list
|
|
||||||
|
|
||||||
| Get_blocks of Block_hash.t list
|
|
||||||
| Block of MBytes.t
|
|
||||||
|
|
||||||
| Current_operations of net_id
|
|
||||||
| Operation_inventory of net_id * Operation_hash.t list
|
|
||||||
|
|
||||||
| Get_operations of Operation_hash.t list
|
|
||||||
| Operation of MBytes.t
|
|
||||||
|
|
||||||
| Get_protocols of Protocol_hash.t list
|
|
||||||
| Protocol of MBytes.t
|
|
||||||
|
|
||||||
module Message = struct
|
|
||||||
|
|
||||||
type t = msg
|
|
||||||
|
|
||||||
let encoding =
|
|
||||||
let open Data_encoding in
|
|
||||||
let case ?max_length ~tag encoding unwrap wrap =
|
|
||||||
P2p.Encoding { tag; encoding; wrap; unwrap; max_length } in
|
|
||||||
[
|
|
||||||
case ~tag:0x10 (tup2 Block_hash.encoding (list Block_hash.encoding))
|
|
||||||
(function
|
|
||||||
| Discover_blocks (Net genesis_bh, bhs) -> Some (genesis_bh, bhs)
|
|
||||||
| _ -> None)
|
|
||||||
(fun (genesis_bh, bhs) -> Discover_blocks (Net genesis_bh, bhs));
|
|
||||||
case ~tag:0x11 (tup2 Block_hash.encoding (list Block_hash.encoding))
|
|
||||||
(function
|
|
||||||
| Block_inventory (Net genesis_bh, bhs) -> Some (genesis_bh, bhs)
|
|
||||||
| _ -> None)
|
|
||||||
(fun (genesis_bh, bhs) -> Block_inventory (Net genesis_bh, bhs));
|
|
||||||
|
|
||||||
case ~tag:0x12 (list Block_hash.encoding)
|
|
||||||
(function
|
|
||||||
| Get_blocks bhs -> Some bhs
|
|
||||||
| _ -> None)
|
|
||||||
(fun bhs -> Get_blocks bhs);
|
|
||||||
case ~tag:0x13 Data_encoding.bytes
|
|
||||||
(function Block b -> Some b | _ -> None)
|
|
||||||
(fun b -> Block b);
|
|
||||||
|
|
||||||
case ~tag:0x20 Block_hash.encoding
|
|
||||||
(function Current_operations (Net genesis_bh) -> Some genesis_bh | _ -> None)
|
|
||||||
(fun genesis_bh -> Current_operations (Net genesis_bh));
|
|
||||||
case ~tag:0x21 (tup2 Block_hash.encoding (list Operation_hash.encoding))
|
|
||||||
(function Operation_inventory ((Net genesis_bh), ops) -> Some (genesis_bh, ops) | _ -> None)
|
|
||||||
(fun (genesis_bh, ops) -> Operation_inventory (Net genesis_bh, ops));
|
|
||||||
|
|
||||||
case ~tag:0x22 (list Operation_hash.encoding)
|
|
||||||
(function
|
|
||||||
| Get_operations ops -> Some ops
|
|
||||||
| _ -> None)
|
|
||||||
(fun ops -> Get_operations ops);
|
|
||||||
case ~tag:0x23 Data_encoding.bytes
|
|
||||||
(function Operation o -> Some o | _ -> None)
|
|
||||||
(fun o -> Operation o);
|
|
||||||
|
|
||||||
case ~tag:0x32 (list Protocol_hash.encoding)
|
|
||||||
(function
|
|
||||||
| Get_protocols protos -> Some protos
|
|
||||||
| _ -> None)
|
|
||||||
(fun protos -> Get_protocols protos);
|
|
||||||
case ~tag:0x33 Data_encoding.bytes
|
|
||||||
(function Protocol proto -> Some proto | _ -> None)
|
|
||||||
(fun proto -> Protocol proto);
|
|
||||||
]
|
|
||||||
|
|
||||||
let supported_versions =
|
|
||||||
let open P2p.Version in
|
|
||||||
[ { name = "TEZOS" ;
|
|
||||||
major = 0 ;
|
|
||||||
minor = 0 ;
|
|
||||||
}
|
|
||||||
]
|
|
||||||
|
|
||||||
end
|
|
||||||
|
|
||||||
type metadata = unit
|
|
||||||
|
|
||||||
module Metadata = struct
|
|
||||||
type t = metadata
|
|
||||||
let initial = ()
|
|
||||||
let encoding = Data_encoding.empty
|
|
||||||
let score () = 0.
|
|
||||||
end
|
|
||||||
|
|
||||||
|
|
||||||
let meta_cfg : _ P2p.meta_config = {
|
|
||||||
P2p.encoding = Metadata.encoding ;
|
|
||||||
initial = Metadata.initial ;
|
|
||||||
score = Metadata.score ;
|
|
||||||
}
|
|
||||||
|
|
||||||
and msg_cfg : _ P2p.message_config = {
|
|
||||||
encoding = Message.encoding ;
|
|
||||||
versions = Message.supported_versions ;
|
|
||||||
}
|
|
||||||
|
|
||||||
type net = (Message.t, Metadata.t) P2p.net
|
|
||||||
type pool = (Message.t, Metadata.t) P2p_connection_pool.t
|
|
||||||
|
|
||||||
let create ~config ~limits =
|
|
||||||
P2p.create ~config ~limits meta_cfg msg_cfg
|
|
||||||
|
|
||||||
let broadcast = P2p.broadcast
|
|
||||||
let try_send = P2p.try_send
|
|
||||||
let recv = P2p.recv_any
|
|
||||||
let send = P2p.send
|
|
||||||
let set_metadata = P2p.set_metadata
|
|
||||||
let get_metadata = P2p.get_metadata
|
|
||||||
let connection_info = P2p.connection_info
|
|
||||||
let find_connection = P2p.find_connection
|
|
||||||
let connections = P2p.connections
|
|
||||||
type connection = (Message.t, Metadata.t) P2p.connection
|
|
||||||
let shutdown = P2p.shutdown
|
|
||||||
let roll = P2p.roll
|
|
||||||
let maintain = P2p.maintain
|
|
||||||
let faked_network = P2p.faked_network
|
|
||||||
|
|
||||||
module Raw = struct
|
|
||||||
type 'a t = 'a P2p.Raw.t =
|
|
||||||
| Bootstrap
|
|
||||||
| Advertise of Point.t list
|
|
||||||
| Message of 'a
|
|
||||||
| Disconnect
|
|
||||||
type message = Message.t t
|
|
||||||
let encoding = P2p.Raw.encoding msg_cfg.encoding
|
|
||||||
let supported_versions = msg_cfg.versions
|
|
||||||
end
|
|
||||||
|
|
||||||
module RPC = struct
|
|
||||||
let stat net = P2p.RPC.stat net
|
|
||||||
|
|
||||||
module Event = P2p.RPC.Event
|
|
||||||
|
|
||||||
let watch = P2p.RPC.watch
|
|
||||||
|
|
||||||
let connect = P2p.RPC.connect
|
|
||||||
|
|
||||||
module Connection = struct
|
|
||||||
let info = P2p.RPC.Connection.info
|
|
||||||
let kick = P2p.RPC.Connection.kick
|
|
||||||
let list = P2p.RPC.Connection.list
|
|
||||||
let count = P2p.RPC.Connection.count
|
|
||||||
end
|
|
||||||
|
|
||||||
module Point = struct
|
|
||||||
type info = P2p.RPC.Point.info
|
|
||||||
module Event = P2p_connection_pool_types.Point_info.Event
|
|
||||||
|
|
||||||
let info = P2p.RPC.Point.info
|
|
||||||
let events = P2p.RPC.Point.events
|
|
||||||
let infos = P2p.RPC.Point.infos
|
|
||||||
let watch = P2p.RPC.Point.watch
|
|
||||||
end
|
|
||||||
|
|
||||||
module Peer_id = struct
|
|
||||||
type info = P2p.RPC.Peer_id.info
|
|
||||||
module Event = P2p_connection_pool_types.Peer_info.Event
|
|
||||||
|
|
||||||
let info = P2p.RPC.Peer_id.info
|
|
||||||
let events = P2p.RPC.Peer_id.events
|
|
||||||
let infos = P2p.RPC.Peer_id.infos
|
|
||||||
let watch = P2p.RPC.Peer_id.watch
|
|
||||||
end
|
|
||||||
end
|
|
@ -1,119 +0,0 @@
|
|||||||
|
|
||||||
open P2p
|
|
||||||
|
|
||||||
type net
|
|
||||||
|
|
||||||
(** A faked p2p layer, which does not initiate any connection
|
|
||||||
nor open any listening socket *)
|
|
||||||
val faked_network : net
|
|
||||||
|
|
||||||
(** Main network initialisation function *)
|
|
||||||
val create : config:config -> limits:limits -> net Lwt.t
|
|
||||||
|
|
||||||
(** A maintenance operation: try to reach the ideal number of peers *)
|
|
||||||
val maintain : net -> unit Lwt.t
|
|
||||||
|
|
||||||
(** Voluntarily drop some connections and replace them by new buddies *)
|
|
||||||
val roll : net -> unit Lwt.t
|
|
||||||
|
|
||||||
(** Close all connections properly *)
|
|
||||||
val shutdown : net -> unit Lwt.t
|
|
||||||
|
|
||||||
(** A connection to a peer *)
|
|
||||||
type connection
|
|
||||||
|
|
||||||
(** Access the domain of active connections *)
|
|
||||||
val connections : net -> connection list
|
|
||||||
|
|
||||||
(** Return the active connection with identity [peer_id] *)
|
|
||||||
val find_connection : net -> Peer_id.t -> connection option
|
|
||||||
|
|
||||||
(** Access the info of an active connection. *)
|
|
||||||
val connection_info : net -> connection -> Connection_info.t
|
|
||||||
|
|
||||||
(** Accessors for meta information about a global identifier *)
|
|
||||||
|
|
||||||
type metadata = unit
|
|
||||||
|
|
||||||
val get_metadata : net -> Peer_id.t -> metadata option
|
|
||||||
val set_metadata : net -> Peer_id.t -> metadata -> unit
|
|
||||||
|
|
||||||
type net_id = Store.net_id
|
|
||||||
|
|
||||||
type msg =
|
|
||||||
|
|
||||||
| Discover_blocks of net_id * Block_hash.t list (* Block locator *)
|
|
||||||
| Block_inventory of net_id * Block_hash.t list
|
|
||||||
|
|
||||||
| Get_blocks of Block_hash.t list
|
|
||||||
| Block of MBytes.t
|
|
||||||
|
|
||||||
| Current_operations of net_id
|
|
||||||
| Operation_inventory of net_id * Operation_hash.t list
|
|
||||||
|
|
||||||
| Get_operations of Operation_hash.t list
|
|
||||||
| Operation of MBytes.t
|
|
||||||
|
|
||||||
| Get_protocols of Protocol_hash.t list
|
|
||||||
| Protocol of MBytes.t
|
|
||||||
|
|
||||||
(** Wait for a payload from any connection in the network *)
|
|
||||||
val recv : net -> (connection * msg) Lwt.t
|
|
||||||
|
|
||||||
(** [send net conn msg] is a thread that returns when [msg] has been
|
|
||||||
successfully enqueued in the send queue. *)
|
|
||||||
val send : net -> connection -> msg -> unit Lwt.t
|
|
||||||
|
|
||||||
(** [try_send net conn msg] is [true] if [msg] has been added to the
|
|
||||||
send queue for [peer], [false] otherwise *)
|
|
||||||
val try_send : net -> connection -> msg -> bool
|
|
||||||
|
|
||||||
(** Send a payload to all peers *)
|
|
||||||
val broadcast : net -> msg -> unit
|
|
||||||
|
|
||||||
(**/**)
|
|
||||||
module Raw : sig
|
|
||||||
type 'a t =
|
|
||||||
| Bootstrap
|
|
||||||
| Advertise of Point.t list
|
|
||||||
| Message of 'a
|
|
||||||
| Disconnect
|
|
||||||
type message = msg t
|
|
||||||
val encoding: message Data_encoding.t
|
|
||||||
val supported_versions: Version.t list
|
|
||||||
end
|
|
||||||
|
|
||||||
module RPC : sig
|
|
||||||
val stat : net -> Stat.t
|
|
||||||
|
|
||||||
module Event = P2p_connection_pool.LogEvent
|
|
||||||
val watch : net -> Event.t Lwt_stream.t * Watcher.stopper
|
|
||||||
val connect : net -> Point.t -> float -> unit tzresult Lwt.t
|
|
||||||
|
|
||||||
module Connection : sig
|
|
||||||
val info : net -> Peer_id.t -> Connection_info.t option
|
|
||||||
val kick : net -> Peer_id.t -> bool -> unit Lwt.t
|
|
||||||
val list : net -> Connection_info.t list
|
|
||||||
val count : net -> int
|
|
||||||
end
|
|
||||||
|
|
||||||
module Point : sig
|
|
||||||
open P2p.RPC.Point
|
|
||||||
module Event = Event
|
|
||||||
|
|
||||||
val info : net -> Point.t -> info option
|
|
||||||
val events : ?max:int -> ?rev:bool -> net -> Point.t -> Event.t list
|
|
||||||
val infos : ?restrict:state list -> net -> (Point.t * info) list
|
|
||||||
val watch : net -> Point.t -> Event.t Lwt_stream.t * Watcher.stopper
|
|
||||||
end
|
|
||||||
|
|
||||||
module Peer_id : sig
|
|
||||||
open P2p.RPC.Peer_id
|
|
||||||
module Event = Event
|
|
||||||
|
|
||||||
val info : net -> Peer_id.t -> info option
|
|
||||||
val events : ?max:int -> ?rev:bool -> net -> Peer_id.t -> Event.t list
|
|
||||||
val infos : ?restrict:state list -> net -> (Peer_id.t * info) list
|
|
||||||
val watch : net -> Peer_id.t -> Event.t Lwt_stream.t * Watcher.stopper
|
|
||||||
end
|
|
||||||
end
|
|
@ -10,13 +10,16 @@
|
|||||||
open Logging.Node.Validator
|
open Logging.Node.Validator
|
||||||
|
|
||||||
type worker = {
|
type worker = {
|
||||||
p2p: Tezos_p2p.net ;
|
|
||||||
activate: ?parent:t -> State.Net.t -> t Lwt.t ;
|
activate: ?parent:t -> State.Net.t -> t Lwt.t ;
|
||||||
get: State.net_id -> t tzresult Lwt.t ;
|
get: State.Net_id.t -> t tzresult Lwt.t ;
|
||||||
get_exn: State.net_id -> t Lwt.t ;
|
get_exn: State.Net_id.t -> t Lwt.t ;
|
||||||
deactivate: t -> unit Lwt.t ;
|
deactivate: t -> unit Lwt.t ;
|
||||||
notify_block: Block_hash.t -> Store.block -> unit Lwt.t ;
|
inject_block:
|
||||||
|
?force:bool -> MBytes.t ->
|
||||||
|
(Block_hash.t * State.Valid_block.t tzresult Lwt.t) tzresult Lwt.t ;
|
||||||
|
notify_block: Block_hash.t -> Store.Block_header.t -> unit Lwt.t ;
|
||||||
shutdown: unit -> unit Lwt.t ;
|
shutdown: unit -> unit Lwt.t ;
|
||||||
|
valid_block_input: State.Valid_block.t Watcher.input ;
|
||||||
}
|
}
|
||||||
|
|
||||||
and t = {
|
and t = {
|
||||||
@ -25,26 +28,29 @@ and t = {
|
|||||||
parent: t option ;
|
parent: t option ;
|
||||||
mutable child: t option ;
|
mutable child: t option ;
|
||||||
prevalidator: Prevalidator.t ;
|
prevalidator: Prevalidator.t ;
|
||||||
notify_block: Block_hash.t -> Store.block -> unit Lwt.t ;
|
net_db: Distributed_db.net ;
|
||||||
|
notify_block: Block_hash.t -> Store.Block_header.t -> unit Lwt.t ;
|
||||||
fetch_block: Block_hash.t -> State.Valid_block.t tzresult Lwt.t ;
|
fetch_block: Block_hash.t -> State.Valid_block.t tzresult Lwt.t ;
|
||||||
create_child: State.Valid_block.t -> unit tzresult Lwt.t ;
|
create_child: State.Valid_block.t -> unit tzresult Lwt.t ;
|
||||||
test_validator: unit -> (t * State.Net.t) option ;
|
test_validator: unit -> (t * Distributed_db.net) option ;
|
||||||
shutdown: unit -> unit Lwt.t ;
|
shutdown: unit -> unit Lwt.t ;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let net_state { net } = net
|
||||||
|
let net_db { net_db } = net_db
|
||||||
|
|
||||||
let activate w net = w.activate net
|
let activate w net = w.activate net
|
||||||
let deactivate t = t.worker.deactivate t
|
let deactivate t = t.worker.deactivate t
|
||||||
let get w = w.get
|
let get w = w.get
|
||||||
let get_exn w = w.get_exn
|
let get_exn w = w.get_exn
|
||||||
let notify_block w = w.notify_block
|
let notify_block w = w.notify_block
|
||||||
|
let inject_block w = w.inject_block
|
||||||
let shutdown w = w.shutdown ()
|
let shutdown w = w.shutdown ()
|
||||||
let test_validator w = w.test_validator ()
|
let test_validator w = w.test_validator ()
|
||||||
|
|
||||||
let fetch_block v = v.fetch_block
|
let fetch_block v = v.fetch_block
|
||||||
let prevalidator v = v.prevalidator
|
let prevalidator v = v.prevalidator
|
||||||
|
|
||||||
let broadcast w m = Tezos_p2p.broadcast w.p2p m
|
|
||||||
|
|
||||||
(** Current block computation *)
|
(** Current block computation *)
|
||||||
|
|
||||||
let may_change_test_network v (block: State.Valid_block.t) =
|
let may_change_test_network v (block: State.Valid_block.t) =
|
||||||
@ -53,9 +59,9 @@ let may_change_test_network v (block: State.Valid_block.t) =
|
|||||||
| None, None -> false
|
| None, None -> false
|
||||||
| Some _, None
|
| Some _, None
|
||||||
| None, Some _ -> true
|
| None, Some _ -> true
|
||||||
| Some (Net net_id, _), Some { net } ->
|
| Some (net_id, _), Some { net } ->
|
||||||
let Store.Net net_id' = State.Net.id net in
|
let net_id' = State.Net.id net in
|
||||||
not (Block_hash.equal net_id net_id') in
|
not (State.Net_id.equal net_id net_id') in
|
||||||
if change then begin
|
if change then begin
|
||||||
v.create_child block >>= function
|
v.create_child block >>= function
|
||||||
| Ok () -> Lwt.return_unit
|
| Ok () -> Lwt.return_unit
|
||||||
@ -66,15 +72,16 @@ let may_change_test_network v (block: State.Valid_block.t) =
|
|||||||
Lwt.return_unit
|
Lwt.return_unit
|
||||||
|
|
||||||
let rec may_set_head v (block: State.Valid_block.t) =
|
let rec may_set_head v (block: State.Valid_block.t) =
|
||||||
State.Net.Blockchain.head v.net >>= fun head ->
|
State.Valid_block.Current.head v.net >>= fun head ->
|
||||||
if Fitness.compare head.fitness block.fitness >= 0 then
|
if Fitness.compare head.fitness block.fitness >= 0 then
|
||||||
Lwt.return_unit
|
Lwt.return_unit
|
||||||
else
|
else
|
||||||
State.Net.Blockchain.test_and_set_head v.net ~old:head block >>= function
|
State.Valid_block.Current.test_and_set_head v.net
|
||||||
|
~old:head block >>= function
|
||||||
| false -> may_set_head v block
|
| false -> may_set_head v block
|
||||||
| true ->
|
| true ->
|
||||||
broadcast v.worker Tezos_p2p.(Block_inventory (State.Net.id v.net, [])) ;
|
Distributed_db.broadcast_head v.net_db block.hash [] ;
|
||||||
Prevalidator.flush v.prevalidator ;
|
Prevalidator.flush v.prevalidator block ;
|
||||||
may_change_test_network v block >>= fun () ->
|
may_change_test_network v block >>= fun () ->
|
||||||
lwt_log_notice "update current head %a %a %a(%t)"
|
lwt_log_notice "update current head %a %a %a(%t)"
|
||||||
Block_hash.pp_short block.hash
|
Block_hash.pp_short block.hash
|
||||||
@ -92,22 +99,19 @@ let rec may_set_head v (block: State.Valid_block.t) =
|
|||||||
|
|
||||||
type error += Invalid_operation of Operation_hash.t
|
type error += Invalid_operation of Operation_hash.t
|
||||||
|
|
||||||
let apply_block net (pred: State.Valid_block.t) hash (block: State.Block.t) =
|
let apply_block net db
|
||||||
let state = State.Net.state net in
|
(pred: State.Valid_block.t) hash (block: State.Block_header.t) =
|
||||||
let State.Net id = State.Net.id net in
|
let id = State.Net.id net in
|
||||||
lwt_log_notice "validate block %a (after %a), net %a"
|
lwt_log_notice "validate block %a (after %a), net %a"
|
||||||
Block_hash.pp_short hash
|
Block_hash.pp_short hash
|
||||||
Block_hash.pp_short block.shell.predecessor
|
Block_hash.pp_short block.shell.predecessor
|
||||||
Block_hash.pp_short id
|
State.Net_id.pp id
|
||||||
>>= fun () ->
|
>>= fun () ->
|
||||||
lwt_log_info "validation of %a: looking for dependencies..."
|
lwt_log_info "validation of %a: looking for dependencies..."
|
||||||
Block_hash.pp_short hash >>= fun () ->
|
Block_hash.pp_short hash >>= fun () ->
|
||||||
map_p
|
Lwt_list.map_p
|
||||||
(fun op ->
|
(fun op -> Distributed_db.Operation.fetch db op)
|
||||||
State.Operation.fetch state (State.Net.id net) op >>= function
|
block.shell.operations >>= fun operations ->
|
||||||
| { data = Error _ as e} -> Lwt.return e
|
|
||||||
| { data = Ok data } -> Lwt.return (Ok data))
|
|
||||||
block.shell.operations >>=? fun operations ->
|
|
||||||
lwt_debug "validation of %a: found operations"
|
lwt_debug "validation of %a: found operations"
|
||||||
Block_hash.pp_short hash >>= fun () ->
|
Block_hash.pp_short hash >>= fun () ->
|
||||||
begin (* Are we validating a block in an expired test network ? *)
|
begin (* Are we validating a block in an expired test network ? *)
|
||||||
@ -133,7 +137,8 @@ let apply_block net (pred: State.Valid_block.t) hash (block: State.Block.t) =
|
|||||||
(fun op_hash raw ->
|
(fun op_hash raw ->
|
||||||
Lwt.return (Proto.parse_operation op_hash raw)
|
Lwt.return (Proto.parse_operation op_hash raw)
|
||||||
|> trace (Invalid_operation op_hash))
|
|> trace (Invalid_operation op_hash))
|
||||||
block.Store.shell.operations operations >>=? fun parsed_operations ->
|
block.Store.Block_header.shell.operations
|
||||||
|
operations >>=? fun parsed_operations ->
|
||||||
lwt_debug "validation of %a: applying block..."
|
lwt_debug "validation of %a: applying block..."
|
||||||
Block_hash.pp_short hash >>= fun () ->
|
Block_hash.pp_short hash >>= fun () ->
|
||||||
Proto.apply
|
Proto.apply
|
||||||
@ -145,117 +150,285 @@ let apply_block net (pred: State.Valid_block.t) hash (block: State.Block.t) =
|
|||||||
(** *)
|
(** *)
|
||||||
|
|
||||||
module Validation_scheduler = struct
|
module Validation_scheduler = struct
|
||||||
let name = "validator"
|
|
||||||
type state = State.Net.t * Block_hash_set.t ref
|
|
||||||
type rdata = t
|
|
||||||
type data = Store.block Time.timed_data
|
|
||||||
let init_request (net, _) hash =
|
|
||||||
State.Block.fetch (State.Net.state net) (State.Net.id net) hash
|
|
||||||
|
|
||||||
let process
|
type state = {
|
||||||
net v ~get:get_context ~set:set_context hash block =
|
db: Distributed_db.net ;
|
||||||
match block with
|
running: Block_hash.Set.t ref ;
|
||||||
| { Time.data = block } ->
|
}
|
||||||
get_context block.Store.shell.predecessor >>= function
|
|
||||||
| Error _ ->
|
let init_request { db } hash =
|
||||||
set_context hash (Error [(* TODO *)])
|
Distributed_db.Block_header.fetch db hash
|
||||||
| Ok _context ->
|
|
||||||
lwt_debug "process %a" Block_hash.pp_short hash >>= fun () ->
|
let process { db } v ~get:get_context ~set:set_context hash block =
|
||||||
begin
|
let state = Distributed_db.state db in
|
||||||
State.Net.Blockchain.genesis net >>= fun genesis ->
|
get_context block.State.Block_header.shell.predecessor >>= function
|
||||||
if Block_hash.equal genesis.hash block.shell.predecessor then
|
| Error _ ->
|
||||||
Lwt.return genesis
|
set_context hash (Error [(* TODO *)])
|
||||||
else
|
| Ok _context ->
|
||||||
State.Valid_block.read_exn
|
lwt_debug "process %a" Block_hash.pp_short hash >>= fun () ->
|
||||||
(State.Net.state net) block.shell.predecessor
|
begin
|
||||||
end >>= fun pred ->
|
State.Valid_block.Current.genesis state >>= fun genesis ->
|
||||||
apply_block net pred hash block >>= function
|
if Block_hash.equal genesis.hash block.shell.predecessor then
|
||||||
| Error ([State.Unknown_protocol _] as err) ->
|
Lwt.return genesis
|
||||||
|
else
|
||||||
|
State.Valid_block.read_exn state block.shell.predecessor
|
||||||
|
end >>= fun pred ->
|
||||||
|
apply_block state db pred hash block >>= function
|
||||||
|
| Error ([State.Unknown_protocol _] as err) ->
|
||||||
|
lwt_log_error
|
||||||
|
"@[<v 2>Ignoring block %a@ %a@]"
|
||||||
|
Block_hash.pp_short hash
|
||||||
|
Error_monad.pp_print_error err
|
||||||
|
| Error exns as error ->
|
||||||
|
set_context hash error >>= fun () ->
|
||||||
|
lwt_warn "Failed to validate block %a."
|
||||||
|
Block_hash.pp_short hash >>= fun () ->
|
||||||
|
lwt_debug "%a" Error_monad.pp_print_error exns
|
||||||
|
| Ok new_context ->
|
||||||
|
(* The sanity check `set_context` detects differences
|
||||||
|
between the computed fitness and the fitness announced
|
||||||
|
in the block header. When they differ, `Valid_block.read`
|
||||||
|
will return an error. *)
|
||||||
|
set_context hash (Ok new_context) >>= fun () ->
|
||||||
|
State.Valid_block.read state hash >>= function
|
||||||
|
| Error err ->
|
||||||
lwt_log_error
|
lwt_log_error
|
||||||
"@[<v 2>Ignoring block %a@ %a@]"
|
"@[<v 2>Ignoring block %a@ %a@]"
|
||||||
Block_hash.pp_short hash
|
Block_hash.pp_short hash
|
||||||
Error_monad.pp_print_error err
|
Error_monad.pp_print_error err
|
||||||
| Error exns as error ->
|
| Ok block ->
|
||||||
set_context hash error >>= fun () ->
|
lwt_debug
|
||||||
lwt_warn "Failed to validate block %a."
|
"validation of %a: reevaluate current block"
|
||||||
Block_hash.pp_short hash >>= fun () ->
|
Block_hash.pp_short hash >>= fun () ->
|
||||||
lwt_debug "%a" Error_monad.pp_print_error exns
|
Watcher.notify v.worker.valid_block_input block ;
|
||||||
| Ok new_context ->
|
may_set_head v block
|
||||||
(* The sanity check `set_context` detects differences
|
|
||||||
between the computed fitness and the fitness announced
|
|
||||||
in the block header. When they differ, `Valid_block.read`
|
|
||||||
will return an error. *)
|
|
||||||
set_context hash (Ok new_context) >>= fun () ->
|
|
||||||
State.Valid_block.read
|
|
||||||
(State.Net.state net) hash >>= function
|
|
||||||
| None ->
|
|
||||||
lwt_log_error
|
|
||||||
"Unexpected error while saving context for block %a."
|
|
||||||
Block_hash.pp_short hash
|
|
||||||
| Some (Error err) ->
|
|
||||||
lwt_log_error
|
|
||||||
"@[<v 2>Ignoring block %a@ %a@]"
|
|
||||||
Block_hash.pp_short hash
|
|
||||||
Error_monad.pp_print_error err
|
|
||||||
| Some (Ok block) ->
|
|
||||||
lwt_debug
|
|
||||||
"validation of %a: reevaluate current block"
|
|
||||||
Block_hash.pp_short hash >>= fun () ->
|
|
||||||
may_set_head v block
|
|
||||||
|
|
||||||
let request (net, running) ~get ~set pendings =
|
let request state ~get ~set pendings =
|
||||||
let time = Time.now () in
|
let time = Time.now () in
|
||||||
let min_block b pb =
|
let min_block b pb =
|
||||||
match pb with
|
match pb with
|
||||||
| None -> Some b
|
| None -> Some b
|
||||||
| Some pb when b.Store.shell.timestamp < pb.Store.shell.timestamp -> Some b
|
| Some pb
|
||||||
|
when b.Store.Block_header.shell.timestamp
|
||||||
|
< pb.Store.Block_header.shell.timestamp ->
|
||||||
|
Some b
|
||||||
| Some _ as pb -> pb in
|
| Some _ as pb -> pb in
|
||||||
let next =
|
let next =
|
||||||
List.fold_left
|
List.fold_left
|
||||||
(fun acc (hash, block, v) ->
|
(fun acc (hash, block, v) ->
|
||||||
match block with
|
match block with
|
||||||
| { Time.data = block }
|
| Error _ ->
|
||||||
when Time.(block.Store.shell.timestamp > time) ->
|
acc
|
||||||
min_block block acc
|
| Ok block ->
|
||||||
| { Time.data = _ } as block ->
|
if Time.(block.Store.Block_header.shell.timestamp > time) then
|
||||||
if not (Block_hash_set.mem hash !running) then begin
|
min_block block acc
|
||||||
running := Block_hash_set.add hash !running ;
|
else begin
|
||||||
Lwt.async (fun () ->
|
if not (Block_hash.Set.mem hash !(state.running)) then begin
|
||||||
process net v
|
state.running := Block_hash.Set.add hash !(state.running) ;
|
||||||
~get:(get v) ~set:set hash block >>= fun () ->
|
Lwt.async (fun () ->
|
||||||
running := Block_hash_set.remove hash !running ;
|
process state v
|
||||||
Lwt.return_unit
|
~get:(get v) ~set hash block >>= fun () ->
|
||||||
)
|
state.running :=
|
||||||
end ;
|
Block_hash.Set.remove hash !(state.running) ;
|
||||||
acc)
|
Lwt.return_unit
|
||||||
|
)
|
||||||
|
end ;
|
||||||
|
acc
|
||||||
|
end)
|
||||||
None
|
None
|
||||||
pendings in
|
pendings in
|
||||||
match next with
|
match next with
|
||||||
| None -> 0.
|
| None -> 0.
|
||||||
| Some b -> Int64.to_float (Time.diff b.Store.shell.timestamp time)
|
| Some b -> Int64.to_float (Time.diff b.Store.Block_header.shell.timestamp time)
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
||||||
module Context_db =
|
module Context_db = struct
|
||||||
Persist.MakeImperativeProxy
|
|
||||||
(State.Valid_block.Store)(Block_hash_table)(Validation_scheduler)
|
|
||||||
|
|
||||||
let rec create_validator ?parent worker net =
|
type key = Block_hash.t
|
||||||
|
type value = State.Valid_block.t
|
||||||
|
|
||||||
Prevalidator.create worker.p2p net >>= fun prevalidator ->
|
type data =
|
||||||
let state = State.Net.state net in
|
{ validator: t ;
|
||||||
|
state: [ `Inited of Store.Block_header.t tzresult
|
||||||
|
| `Initing of Store.Block_header.t tzresult Lwt.t ] ;
|
||||||
|
wakener: State.Valid_block.t tzresult Lwt.u }
|
||||||
|
|
||||||
|
type t =
|
||||||
|
{ tbl : data Block_hash.Table.t ;
|
||||||
|
canceler : Lwt_utils.Canceler.t ;
|
||||||
|
worker_trigger: unit -> unit;
|
||||||
|
worker_waiter: unit -> unit Lwt.t ;
|
||||||
|
worker: unit Lwt.t ;
|
||||||
|
vstate : Validation_scheduler.state }
|
||||||
|
|
||||||
|
let pending_requests { tbl } =
|
||||||
|
Block_hash.Table.fold
|
||||||
|
(fun h data acc ->
|
||||||
|
match data.state with
|
||||||
|
| `Initing _ -> acc
|
||||||
|
| `Inited d -> (h, d, data.validator) :: acc)
|
||||||
|
tbl []
|
||||||
|
|
||||||
|
let pending { tbl } hash = Block_hash.Table.mem tbl hash
|
||||||
|
|
||||||
|
let request { tbl ; worker_trigger ; vstate } validator hash =
|
||||||
|
assert (not (Block_hash.Table.mem tbl hash));
|
||||||
|
let waiter, wakener = Lwt.wait () in
|
||||||
|
let data =
|
||||||
|
Distributed_db.Block_header.fetch vstate.db hash >>= return in
|
||||||
|
match Lwt.state data with
|
||||||
|
| Lwt.Return data ->
|
||||||
|
let state = `Inited data in
|
||||||
|
Block_hash.Table.add tbl hash { validator ; state ; wakener } ;
|
||||||
|
worker_trigger () ;
|
||||||
|
waiter
|
||||||
|
| _ ->
|
||||||
|
let state = `Initing data in
|
||||||
|
Block_hash.Table.add tbl hash { validator ; state ; wakener } ;
|
||||||
|
Lwt.async
|
||||||
|
(fun () ->
|
||||||
|
data >>= fun data ->
|
||||||
|
let state = `Inited data in
|
||||||
|
Block_hash.Table.add tbl hash { validator ; state ; wakener } ;
|
||||||
|
worker_trigger () ;
|
||||||
|
Lwt.return_unit) ;
|
||||||
|
waiter
|
||||||
|
|
||||||
|
let prefetch ({ vstate ; tbl } as session) validator hash =
|
||||||
|
let state = Distributed_db.state vstate.db in
|
||||||
|
Lwt.ignore_result
|
||||||
|
(State.Valid_block.known state hash >>= fun exists ->
|
||||||
|
if not exists && not (Block_hash.Table.mem tbl hash) then
|
||||||
|
request session validator hash >>= fun _ -> Lwt.return_unit
|
||||||
|
else
|
||||||
|
Lwt.return_unit)
|
||||||
|
|
||||||
|
let known { vstate } hash =
|
||||||
|
let state = Distributed_db.state vstate.db in
|
||||||
|
State.Valid_block.known state hash
|
||||||
|
|
||||||
|
let read { vstate } hash =
|
||||||
|
let state = Distributed_db.state vstate.db in
|
||||||
|
State.Valid_block.read state hash
|
||||||
|
|
||||||
|
let fetch ({ vstate ; tbl } as session) validator hash =
|
||||||
|
let state = Distributed_db.state vstate.db in
|
||||||
|
try Lwt.waiter_of_wakener (Block_hash.Table.find tbl hash).wakener
|
||||||
|
with Not_found ->
|
||||||
|
State.Valid_block.read_opt state hash >>= function
|
||||||
|
| Some op -> Lwt.return (Ok op)
|
||||||
|
| None ->
|
||||||
|
try Lwt.waiter_of_wakener (Block_hash.Table.find tbl hash).wakener
|
||||||
|
with Not_found -> request session validator hash
|
||||||
|
|
||||||
|
let store { vstate ; tbl } hash data =
|
||||||
|
let state = Distributed_db.state vstate.db in
|
||||||
|
begin
|
||||||
|
match data with
|
||||||
|
| Ok data ->
|
||||||
|
Distributed_db.Block_header.commit vstate.db hash >>= fun () ->
|
||||||
|
State.Valid_block.store state hash data >>= fun block ->
|
||||||
|
Lwt.return (block <> Ok None)
|
||||||
|
| Error err ->
|
||||||
|
State.Block_header.mark_invalid state hash err
|
||||||
|
end >>= fun changed ->
|
||||||
|
try
|
||||||
|
State.Valid_block.read state hash >>= fun block ->
|
||||||
|
let wakener = (Block_hash.Table.find tbl hash).wakener in
|
||||||
|
Block_hash.Table.remove tbl hash;
|
||||||
|
Lwt.wakeup wakener block ;
|
||||||
|
Lwt.return changed
|
||||||
|
with Not_found -> Lwt.return changed
|
||||||
|
|
||||||
|
let create vstate =
|
||||||
|
let tbl = Block_hash.Table.create 50 in
|
||||||
|
let canceler = Lwt_utils.Canceler.create () in
|
||||||
|
let worker_trigger, worker_waiter = Lwt_utils.trigger () in
|
||||||
|
let session =
|
||||||
|
{ tbl ; vstate ; worker = Lwt.return () ;
|
||||||
|
canceler ; worker_trigger ; worker_waiter } in
|
||||||
|
let worker =
|
||||||
|
let rec worker_loop () =
|
||||||
|
Lwt_utils.protect ~canceler begin fun () ->
|
||||||
|
worker_waiter () >>= return
|
||||||
|
end >>= function
|
||||||
|
| Error [Lwt_utils.Canceled] -> Lwt.return_unit
|
||||||
|
| Error err ->
|
||||||
|
lwt_log_error
|
||||||
|
"@[Unexpected error in validation:@ %a@]"
|
||||||
|
pp_print_error err >>= fun () ->
|
||||||
|
worker_loop ()
|
||||||
|
| Ok () ->
|
||||||
|
begin
|
||||||
|
match pending_requests session with
|
||||||
|
| [] -> ()
|
||||||
|
| requests ->
|
||||||
|
let get = fetch session
|
||||||
|
and set k v =
|
||||||
|
store session k v >>= fun _ -> Lwt.return_unit in
|
||||||
|
let timeout =
|
||||||
|
Validation_scheduler.request
|
||||||
|
vstate ~get ~set requests in
|
||||||
|
if timeout > 0. then
|
||||||
|
Lwt.ignore_result
|
||||||
|
(Lwt_unix.sleep timeout >|= worker_trigger);
|
||||||
|
end ;
|
||||||
|
worker_loop ()
|
||||||
|
in
|
||||||
|
Lwt_utils.worker "validation"
|
||||||
|
~run:worker_loop
|
||||||
|
~cancel:(fun () -> Lwt_utils.Canceler.cancel canceler) in
|
||||||
|
{ session with worker }
|
||||||
|
|
||||||
|
let shutdown { canceler ; worker } =
|
||||||
|
Lwt_utils.Canceler.cancel canceler >>= fun () -> worker
|
||||||
|
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
let rec create_validator ?parent worker state db net =
|
||||||
|
|
||||||
|
let queue = Lwt_pipe.create () in
|
||||||
|
let current_ops = ref (fun () -> []) in
|
||||||
|
|
||||||
|
let callback : Distributed_db.callback = {
|
||||||
|
notify_branch = begin fun gid locator ->
|
||||||
|
Lwt.async (fun () -> Lwt_pipe.push queue (`Branch (gid, locator)))
|
||||||
|
end ;
|
||||||
|
current_branch = begin fun size ->
|
||||||
|
State.Valid_block.Current.head net >>= fun head ->
|
||||||
|
State.Valid_block.Helpers.block_locator net size head
|
||||||
|
end ;
|
||||||
|
notify_head = begin fun gid block ops ->
|
||||||
|
Lwt.async (fun () -> Lwt_pipe.push queue (`Head (gid, block, ops))) ;
|
||||||
|
end ;
|
||||||
|
current_head = begin fun size ->
|
||||||
|
State.Valid_block.Current.head net >>= fun head ->
|
||||||
|
Lwt.return (head.hash, Utils.list_sub (!current_ops ()) size)
|
||||||
|
end ;
|
||||||
|
disconnection = (fun _gid -> ()) ;
|
||||||
|
} in
|
||||||
|
|
||||||
|
let net_id = State.Net.id net in
|
||||||
|
let net_db = Distributed_db.activate ~callback db net in
|
||||||
let proxy =
|
let proxy =
|
||||||
Context_db.create
|
Context_db.create { db = net_db ; running = ref Block_hash.Set.empty } in
|
||||||
(net, ref Block_hash_set.empty)
|
|
||||||
(State.Valid_block.get_store state) in
|
Prevalidator.create net_db >>= fun prevalidator ->
|
||||||
State.Net.activate net ;
|
current_ops :=
|
||||||
|
(fun () ->
|
||||||
|
let res, _ = Prevalidator.operations prevalidator in
|
||||||
|
res.applied);
|
||||||
|
let new_blocks = ref Lwt.return_unit in
|
||||||
|
|
||||||
let shutdown () =
|
let shutdown () =
|
||||||
lwt_log_notice "shutdown %a"
|
lwt_log_notice "shutdown %a" State.Net_id.pp net_id >>= fun () ->
|
||||||
Store.pp_net_id (State.Net.id net) >>= fun () ->
|
Distributed_db.deactivate net_db >>= fun () ->
|
||||||
State.Net.deactivate net ;
|
Lwt_pipe.close queue ;
|
||||||
Lwt.join [
|
Lwt.join [
|
||||||
Context_db.shutdown proxy ;
|
Context_db.shutdown proxy ;
|
||||||
|
!new_blocks ;
|
||||||
Prevalidator.shutdown prevalidator ;
|
Prevalidator.shutdown prevalidator ;
|
||||||
]
|
]
|
||||||
in
|
in
|
||||||
@ -266,6 +439,7 @@ let rec create_validator ?parent worker net =
|
|||||||
parent ;
|
parent ;
|
||||||
child = None ;
|
child = None ;
|
||||||
prevalidator ;
|
prevalidator ;
|
||||||
|
net_db ;
|
||||||
shutdown ;
|
shutdown ;
|
||||||
notify_block ;
|
notify_block ;
|
||||||
fetch_block ;
|
fetch_block ;
|
||||||
@ -276,14 +450,14 @@ let rec create_validator ?parent worker net =
|
|||||||
and notify_block hash block =
|
and notify_block hash block =
|
||||||
lwt_debug "-> Validator.notify_block %a"
|
lwt_debug "-> Validator.notify_block %a"
|
||||||
Block_hash.pp_short hash >>= fun () ->
|
Block_hash.pp_short hash >>= fun () ->
|
||||||
State.Net.Blockchain.head net >>= fun head ->
|
State.Valid_block.Current.head net >>= fun head ->
|
||||||
if Fitness.compare head.fitness block.shell.fitness <= 0 then
|
if Fitness.compare head.fitness block.shell.fitness <= 0 then
|
||||||
Context_db.prefetch proxy v hash;
|
Context_db.prefetch proxy v hash ;
|
||||||
Lwt.return_unit
|
Lwt.return_unit
|
||||||
|
|
||||||
and fetch_block hash =
|
and fetch_block hash =
|
||||||
Context_db.fetch proxy v hash >>=? fun _context ->
|
Context_db.fetch proxy v hash >>=? fun _context ->
|
||||||
State.Valid_block.read_exn (State.Net.state net) hash >>= fun block ->
|
State.Valid_block.read_exn net hash >>= fun block ->
|
||||||
return block
|
return block
|
||||||
|
|
||||||
and create_child block =
|
and create_child block =
|
||||||
@ -296,18 +470,16 @@ let rec create_validator ?parent worker net =
|
|||||||
end >>= fun () ->
|
end >>= fun () ->
|
||||||
match block.test_network with
|
match block.test_network with
|
||||||
| None -> return ()
|
| None -> return ()
|
||||||
| Some (Net block as net_id, expiration) ->
|
| Some (net_id, expiration) ->
|
||||||
begin
|
begin
|
||||||
match State.Net.get state net_id with
|
State.Net.get state net_id >>= function
|
||||||
| Ok net_store -> return net_store
|
| Ok net_store -> return net_store
|
||||||
| Error _ ->
|
| Error _ ->
|
||||||
State.Valid_block.read_exn state block >>= fun block ->
|
State.Valid_block.fork_testnet
|
||||||
let genesis = {
|
state net block expiration >>=? fun net_store ->
|
||||||
Store.block = block.hash ;
|
State.Valid_block.Current.head net_store >>= fun block ->
|
||||||
time = block.timestamp ;
|
Watcher.notify v.worker.valid_block_input block ;
|
||||||
protocol = block.test_protocol_hash ;
|
return net_store
|
||||||
} in
|
|
||||||
State.Net.create state ~expiration genesis
|
|
||||||
end >>=? fun net_store ->
|
end >>=? fun net_store ->
|
||||||
worker.activate ~parent:v net_store >>= fun child ->
|
worker.activate ~parent:v net_store >>= fun child ->
|
||||||
v.child <- Some child ;
|
v.child <- Some child ;
|
||||||
@ -316,35 +488,54 @@ let rec create_validator ?parent worker net =
|
|||||||
and test_validator () =
|
and test_validator () =
|
||||||
match v.child with
|
match v.child with
|
||||||
| None -> None
|
| None -> None
|
||||||
| Some child -> Some (child, child.net)
|
| Some child -> Some (child, child.net_db)
|
||||||
|
|
||||||
in
|
in
|
||||||
|
|
||||||
|
new_blocks := begin
|
||||||
|
let rec loop () =
|
||||||
|
Lwt_pipe.pop queue >>= function
|
||||||
|
| `Branch (_gid, locator) ->
|
||||||
|
List.iter (Context_db.prefetch proxy v) locator ;
|
||||||
|
loop ()
|
||||||
|
| `Head (gid, head, ops) ->
|
||||||
|
Context_db.prefetch proxy v head ;
|
||||||
|
List.iter (Prevalidator.notify_operation prevalidator gid) ops ;
|
||||||
|
loop ()
|
||||||
|
in
|
||||||
|
Lwt.catch loop
|
||||||
|
(function Lwt_pipe.Closed -> Lwt.return_unit
|
||||||
|
| exn -> Lwt.fail exn)
|
||||||
|
end ;
|
||||||
|
|
||||||
Lwt.return v
|
Lwt.return v
|
||||||
|
|
||||||
type error += Unknown_network of State.net_id
|
type error += Unknown_network of State.Net_id.t
|
||||||
|
|
||||||
let create_worker p2p state =
|
let create_worker state db =
|
||||||
|
|
||||||
let validators : t Lwt.t Block_hash_table.t = Block_hash_table.create 7 in
|
let validators : t Lwt.t State.Net_id.Table.t =
|
||||||
|
Store.Net_id.Table.create 7 in
|
||||||
|
|
||||||
let get_exn (State.Net net) = Block_hash_table.find validators net in
|
let valid_block_input = Watcher.create_input () in
|
||||||
|
|
||||||
|
let get_exn net = State.Net_id.Table.find validators net in
|
||||||
let get net =
|
let get net =
|
||||||
try get_exn net >>= fun v -> return v
|
try get_exn net >>= fun v -> return v
|
||||||
with Not_found -> fail (State.Unknown_network net) in
|
with Not_found -> fail (State.Unknown_network net) in
|
||||||
let remove (State.Net net) = Block_hash_table.remove validators net in
|
let remove net = State.Net_id.Table.remove validators net in
|
||||||
|
|
||||||
let deactivate { net } =
|
let deactivate { net } =
|
||||||
let id = State.Net.id net in
|
let id = State.Net.id net in
|
||||||
get id >>= function
|
get id >>= function
|
||||||
| Error _ -> Lwt.return_unit
|
| Error _ -> Lwt.return_unit
|
||||||
| Ok v ->
|
| Ok v ->
|
||||||
lwt_log_notice "deactivate network %a" Store.pp_net_id id >>= fun () ->
|
lwt_log_notice "deactivate network %a" State.Net_id.pp id >>= fun () ->
|
||||||
remove id ;
|
remove id ;
|
||||||
v.shutdown ()
|
v.shutdown ()
|
||||||
in
|
in
|
||||||
|
|
||||||
let notify_block hash (block : Store.block) =
|
let notify_block hash (block : Store.Block_header.t) =
|
||||||
match get_exn block.shell.net_id with
|
match get_exn block.shell.net_id with
|
||||||
| exception Not_found -> Lwt.return_unit
|
| exception Not_found -> Lwt.return_unit
|
||||||
| net ->
|
| net ->
|
||||||
@ -358,7 +549,7 @@ let create_worker p2p state =
|
|||||||
let net_maintenance () =
|
let net_maintenance () =
|
||||||
lwt_log_info "net maintenance" >>= fun () ->
|
lwt_log_info "net maintenance" >>= fun () ->
|
||||||
let time = Time.now () in
|
let time = Time.now () in
|
||||||
Block_hash_table.fold
|
Store.Net_id.Table.fold
|
||||||
(fun _ v acc ->
|
(fun _ v acc ->
|
||||||
v >>= fun v ->
|
v >>= fun v ->
|
||||||
acc >>= fun () ->
|
acc >>= fun () ->
|
||||||
@ -366,15 +557,16 @@ let create_worker p2p state =
|
|||||||
| Some eol when Time.(eol <= time) -> deactivate v
|
| Some eol when Time.(eol <= time) -> deactivate v
|
||||||
| Some _ | None -> Lwt.return_unit)
|
| Some _ | None -> Lwt.return_unit)
|
||||||
validators Lwt.return_unit >>= fun () ->
|
validators Lwt.return_unit >>= fun () ->
|
||||||
|
State.Net.all state >>= fun all_net ->
|
||||||
Lwt_list.iter_p
|
Lwt_list.iter_p
|
||||||
(fun net ->
|
(fun net ->
|
||||||
match State.Net.expiration net with
|
match State.Net.expiration net with
|
||||||
| Some eol when Time.(eol <= time) ->
|
| Some eol when Time.(eol <= time) ->
|
||||||
lwt_log_notice "destroy network %a"
|
lwt_log_notice "destroy network %a"
|
||||||
Store.pp_net_id (State.Net.id net) >>= fun () ->
|
State.Net_id.pp (State.Net.id net) >>= fun () ->
|
||||||
State.Net.destroy net
|
State.Net.destroy state net
|
||||||
| Some _ | None -> Lwt.return_unit)
|
| Some _ | None -> Lwt.return_unit)
|
||||||
(State.Net.all state) >>= fun () ->
|
all_net >>= fun () ->
|
||||||
next_net_maintenance := Time.add (Time.now ()) (Int64.of_int 55) ;
|
next_net_maintenance := Time.add (Time.now ()) (Int64.of_int 55) ;
|
||||||
Lwt.return_unit in
|
Lwt.return_unit in
|
||||||
let next_head_maintenance = ref (Time.now ()) in
|
let next_head_maintenance = ref (Time.now ()) in
|
||||||
@ -414,31 +606,46 @@ let create_worker p2p state =
|
|||||||
let shutdown () =
|
let shutdown () =
|
||||||
cancel () >>= fun () ->
|
cancel () >>= fun () ->
|
||||||
let validators =
|
let validators =
|
||||||
Block_hash_table.fold
|
Store.Net_id.Table.fold
|
||||||
(fun _ (v: t Lwt.t) acc -> (v >>= fun v -> v.shutdown ()) :: acc)
|
(fun _ (v: t Lwt.t) acc -> (v >>= fun v -> v.shutdown ()) :: acc)
|
||||||
validators [] in
|
validators [] in
|
||||||
Lwt.join (maintenance_worker :: validators) in
|
Lwt.join (maintenance_worker :: validators) in
|
||||||
|
|
||||||
|
let inject_block ?(force = false) bytes =
|
||||||
|
Distributed_db.inject_block db bytes >>=? fun (hash, block) ->
|
||||||
|
get block.shell.net_id >>=? fun net ->
|
||||||
|
let validation =
|
||||||
|
State.Valid_block.Current.head net.net >>= fun head ->
|
||||||
|
if force
|
||||||
|
|| Fitness.compare head.fitness block.shell.fitness <= 0 then
|
||||||
|
fetch_block net hash
|
||||||
|
else
|
||||||
|
failwith "Fitness is below the current one" in
|
||||||
|
return (hash, validation) in
|
||||||
|
|
||||||
let rec activate ?parent net =
|
let rec activate ?parent net =
|
||||||
lwt_log_notice "activate network %a"
|
lwt_log_notice "activate network %a"
|
||||||
Store.pp_net_id (State.Net.id net) >>= fun () ->
|
State.Net_id.pp (State.Net.id net) >>= fun () ->
|
||||||
State.Net.Blockchain.genesis net >>= fun genesis ->
|
State.Valid_block.Current.genesis net >>= fun genesis ->
|
||||||
get (Net genesis.hash) >>= function
|
let net_id = State.Net_id.Id genesis.hash in
|
||||||
|
get net_id >>= function
|
||||||
| Error _ ->
|
| Error _ ->
|
||||||
let v = create_validator ?parent worker net in
|
let v = create_validator ?parent worker state db net in
|
||||||
Block_hash_table.add validators genesis.hash v ;
|
Store.Net_id.Table.add validators net_id v ;
|
||||||
v
|
v
|
||||||
| Ok v -> Lwt.return v
|
| Ok v -> Lwt.return v
|
||||||
|
|
||||||
and worker = {
|
and worker = {
|
||||||
p2p ;
|
|
||||||
get ; get_exn ;
|
get ; get_exn ;
|
||||||
activate ; deactivate ;
|
activate ; deactivate ;
|
||||||
notify_block ;
|
notify_block ;
|
||||||
|
inject_block ;
|
||||||
shutdown ;
|
shutdown ;
|
||||||
|
valid_block_input ;
|
||||||
}
|
}
|
||||||
|
|
||||||
in
|
in
|
||||||
|
|
||||||
worker
|
worker
|
||||||
|
|
||||||
|
let watcher { valid_block_input } = Watcher.create_stream valid_block_input
|
||||||
|
@ -9,19 +9,29 @@
|
|||||||
|
|
||||||
type worker
|
type worker
|
||||||
|
|
||||||
val create_worker: Tezos_p2p.net -> State.t -> worker
|
val create_worker: State.t -> Distributed_db.t -> worker
|
||||||
val shutdown: worker -> unit Lwt.t
|
val shutdown: worker -> unit Lwt.t
|
||||||
|
|
||||||
val notify_block: worker -> Block_hash.t -> Store.block -> unit Lwt.t
|
val notify_block: worker -> Block_hash.t -> State.Block_header.t -> unit Lwt.t
|
||||||
|
|
||||||
type t
|
type t
|
||||||
|
|
||||||
val activate: worker -> State.Net.t -> t Lwt.t
|
val activate: worker -> State.Net.t -> t Lwt.t
|
||||||
val get: worker -> State.net_id -> t tzresult Lwt.t
|
val get: worker -> State.Net_id.t -> t tzresult Lwt.t
|
||||||
val get_exn: worker -> State.net_id -> t Lwt.t
|
val get_exn: worker -> State.Net_id.t -> t Lwt.t
|
||||||
val deactivate: t -> unit Lwt.t
|
val deactivate: t -> unit Lwt.t
|
||||||
|
|
||||||
|
val net_state: t -> State.Net.t
|
||||||
|
val net_db: t -> Distributed_db.net
|
||||||
|
|
||||||
val fetch_block:
|
val fetch_block:
|
||||||
t -> Block_hash.t -> State.Valid_block.t tzresult Lwt.t
|
t -> Block_hash.t -> State.Valid_block.t tzresult Lwt.t
|
||||||
|
|
||||||
|
val inject_block:
|
||||||
|
worker -> ?force:bool -> MBytes.t ->
|
||||||
|
(Block_hash.t * State.Valid_block.t tzresult Lwt.t) tzresult Lwt.t
|
||||||
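A minimal sketch of driving [inject_block] from client code, following the two-step result above (the hash is returned immediately, the validation later):

    (* Inject raw block bytes and wait for the validation to complete. *)
    let inject_and_wait worker bytes =
      Validator.inject_block worker ~force:false bytes >>=? fun (hash, validation) ->
      validation >>=? fun _valid_block ->
      return hash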
|
|
||||||
val prevalidator: t -> Prevalidator.t
|
val prevalidator: t -> Prevalidator.t
|
||||||
val test_validator: t -> (t * State.Net.t) option
|
val test_validator: t -> (t * Distributed_db.net) option
|
||||||
|
|
||||||
|
val watcher: worker -> State.Valid_block.t Lwt_stream.t * Watcher.stopper
|
||||||
|
@ -41,6 +41,8 @@ let compare f1 f2 =
|
|||||||
let len = compare (List.length f1) (List.length f2) in
|
let len = compare (List.length f1) (List.length f2) in
|
||||||
if len = 0 then compare_rec f1 f2 else len
|
if len = 0 then compare_rec f1 f2 else len
|
||||||
|
|
||||||
|
let equal f1 f2 = compare f1 f2 = 0
|
||||||
|
|
||||||
let rec pp fmt = function
|
let rec pp fmt = function
|
||||||
| [] -> ()
|
| [] -> ()
|
||||||
| [f] -> Format.fprintf fmt "%s" (Hex_encode.hex_of_bytes f)
|
| [f] -> Format.fprintf fmt "%s" (Hex_encode.hex_of_bytes f)
|
||||||
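A small illustrative helper, assuming block values that carry a [fitness] field as in the validator:

    (* Pick the candidate with the best fitness: a longer fitness wins,
       then the element-wise comparison decides. *)
    let best_head candidates =
      List.fold_left
        (fun best block ->
           match best with
           | Some b when Fitness.compare b.fitness block.fitness >= 0 -> best
           | _ -> Some block)
        None candidates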
|
@ -10,6 +10,7 @@
|
|||||||
type fitness = MBytes.t list
|
type fitness = MBytes.t list
|
||||||
|
|
||||||
val compare: fitness -> fitness -> int
|
val compare: fitness -> fitness -> int
|
||||||
|
val equal: fitness -> fitness -> bool
|
||||||
val pp: Format.formatter -> fitness -> unit
|
val pp: Format.formatter -> fitness -> unit
|
||||||
val to_string: fitness -> string
|
val to_string: fitness -> string
|
||||||
|
|
||||||
|
@ -19,24 +19,22 @@ module type REGISTRED_PROTOCOL = sig
|
|||||||
val complete_b58prefix : Context.t -> string -> string list Lwt.t
|
val complete_b58prefix : Context.t -> string -> string list Lwt.t
|
||||||
end
|
end
|
||||||
|
|
||||||
type net_id = Store.net_id = Net of Block_hash.t
|
module Net_id = Store.Net_id
|
||||||
|
|
||||||
let net_id_encoding = Store.net_id_encoding
|
type shell_operation = Store.Operation.shell_header = {
|
||||||
|
net_id: Net_id.t ;
|
||||||
-type shell_operation = Store.shell_operation = {
-  net_id: net_id ;
}
-let shell_operation_encoding = Store.shell_operation_encoding
+let shell_operation_encoding = Store.Operation.shell_header_encoding

-type raw_operation = Store.operation = {
+type raw_operation = Store.Operation.t = {
  shell: shell_operation ;
  proto: MBytes.t ;
}
-let raw_operation_encoding = Store.operation_encoding
+let raw_operation_encoding = Store.Operation.encoding

(** The version agnostic toplevel structure of blocks. *)
-type shell_block = Store.shell_block = {
+type shell_block = Store.Block_header.shell_header = {
-  net_id: net_id ;
+  net_id: Net_id.t ;
  (** The genesis of the chain this block belongs to. *)
  predecessor: Block_hash.t ;
  (** The preceding block in the chain. *)
@@ -49,43 +47,43 @@ type shell_block = Store.shell_block = {
  operations: Operation_hash.t list ;
  (** The sequence of operations. *)
}
-let shell_block_encoding = Store.shell_block_encoding
+let shell_block_encoding = Store.Block_header.shell_header_encoding

-type raw_block = Store.block = {
+type raw_block = Store.Block_header.t = {
  shell: shell_block ;
  proto: MBytes.t ;
}
-let raw_block_encoding = Store.block_encoding
+let raw_block_encoding = Store.Block_header.encoding

type 'error preapply_result = 'error Protocol.preapply_result = {
  applied: Operation_hash.t list;
-  refused: 'error list Operation_hash_map.t;
+  refused: 'error list Operation_hash.Map.t;
-  branch_refused: 'error list Operation_hash_map.t;
+  branch_refused: 'error list Operation_hash.Map.t;
-  branch_delayed: 'error list Operation_hash_map.t;
+  branch_delayed: 'error list Operation_hash.Map.t;
}

let empty_result = {
  applied = [] ;
-  refused = Operation_hash_map.empty ;
+  refused = Operation_hash.Map.empty ;
-  branch_refused = Operation_hash_map.empty ;
+  branch_refused = Operation_hash.Map.empty ;
-  branch_delayed = Operation_hash_map.empty ;
+  branch_delayed = Operation_hash.Map.empty ;
}

let map_result f r = {
  applied = r.applied;
-  refused = Operation_hash_map.map f r.refused ;
+  refused = Operation_hash.Map.map f r.refused ;
-  branch_refused = Operation_hash_map.map f r.branch_refused ;
+  branch_refused = Operation_hash.Map.map f r.branch_refused ;
-  branch_delayed = Operation_hash_map.map f r.branch_delayed ;
+  branch_delayed = Operation_hash.Map.map f r.branch_delayed ;
}

let preapply_result_encoding error_encoding =
  let open Data_encoding in
  let refused_encoding = tup2 Operation_hash.encoding error_encoding in
-  let build_list map = Operation_hash_map.bindings map in
+  let build_list map = Operation_hash.Map.bindings map in
  let build_map list =
    List.fold_right
-      (fun (k, e) m -> Operation_hash_map.add k e m)
+      (fun (k, e) m -> Operation_hash.Map.add k e m)
-      list Operation_hash_map.empty in
+      list Operation_hash.Map.empty in
  conv
    (fun { applied ; refused ; branch_refused ; branch_delayed } ->
      (applied, build_list refused,
@@ -104,7 +102,7 @@ let preapply_result_encoding error_encoding =

(** Version table *)

-module VersionTable = Protocol_hash_table
+module VersionTable = Protocol_hash.Table

let versions : ((module REGISTRED_PROTOCOL)) VersionTable.t =
  VersionTable.create 20
@@ -208,14 +206,14 @@ let compile hash units =
let operations t =
  let ops =
    List.fold_left
-      (fun acc x -> Operation_hash_set.add x acc)
+      (fun acc x -> Operation_hash.Set.add x acc)
-      Operation_hash_set.empty t.applied in
+      Operation_hash.Set.empty t.applied in
  let ops =
-    Operation_hash_map.fold
+    Operation_hash.Map.fold
-      (fun x _ acc -> Operation_hash_set.add x acc)
+      (fun x _ acc -> Operation_hash.Set.add x acc)
      t.branch_delayed ops in
  let ops =
-    Operation_hash_map.fold
+    Operation_hash.Map.fold
-      (fun x _ acc -> Operation_hash_set.add x acc)
+      (fun x _ acc -> Operation_hash.Set.add x acc)
      t.branch_refused ops in
  ops
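The recurring substitution in this hunk is a namespacing change only: the standalone `Operation_hash_map` / `Operation_hash_set` instantiations are replaced by the `Map` / `Set` submodules that every hash module now carries. A minimal sketch of a call site after the change (the helper name is hypothetical, not part of this commit):

  (* Map.cardinal comes from the standard Map.S signature included in Operation_hash.Map. *)
  let count_refused (r : 'error preapply_result) =
    Operation_hash.Map.cardinal r.refused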
@@ -7,24 +7,25 @@
(* *)
(**************************************************************************)

-type net_id = Store.net_id = Net of Block_hash.t
-val net_id_encoding: net_id Data_encoding.t
+module Net_id : sig
+  type t = Store.Net_id.t
+  val encoding : t Data_encoding.t
+end

-type shell_operation = Store.shell_operation = {
-  net_id: net_id ;
+type shell_operation = Store.Operation.shell_header = {
+  net_id: Net_id.t ;
}
val shell_operation_encoding: shell_operation Data_encoding.t

-type raw_operation = Store.operation = {
+type raw_operation = Store.Operation.t = {
  shell: shell_operation ;
  proto: MBytes.t ;
}
val raw_operation_encoding: raw_operation Data_encoding.t

(** The version agnostic toplevel structure of blocks. *)
-type shell_block = Store.shell_block = {
+type shell_block = Store.Block_header.shell_header = {
-  net_id: net_id ;
+  net_id: Net_id.t ;
  (** The genesis of the chain this block belongs to. *)
  predecessor: Block_hash.t ;
  (** The preceding block in the chain. *)
@@ -39,7 +40,7 @@ type shell_block = Store.shell_block = {
}
val shell_block_encoding: shell_block Data_encoding.t

-type raw_block = Store.block = {
+type raw_block = Store.Block_header.t = {
  shell: shell_block ;
  proto: MBytes.t ;
}
@@ -47,16 +48,16 @@ val raw_block_encoding: raw_block Data_encoding.t

type 'error preapply_result = 'error Protocol.preapply_result = {
  applied: Operation_hash.t list;
-  refused: 'error list Operation_hash_map.t; (* e.g. invalid signature. *)
+  refused: 'error list Operation_hash.Map.t; (* e.g. invalid signature. *)
-  branch_refused: 'error list Operation_hash_map.t; (* e.g. past account counter;
+  branch_refused: 'error list Operation_hash.Map.t; (* e.g. past account counter;
                                                        insufficent balance *)
-  branch_delayed: 'error list Operation_hash_map.t; (* e.g. futur account counter. *)
+  branch_delayed: 'error list Operation_hash.Map.t; (* e.g. futur account counter. *)
}

val empty_result: 'error preapply_result
val map_result: ('a list -> 'b list) -> 'a preapply_result -> 'b preapply_result

-val operations: 'error preapply_result -> Operation_hash_set.t
+val operations: 'error preapply_result -> Operation_hash.Set.t

val preapply_result_encoding :
  'error list Data_encoding.t ->
@@ -20,7 +20,7 @@ let select_winning_proposal proposals =
      Some ([proposal], vote)
    else
      previous in
-  match Protocol_hash_map.fold merge proposals None with
+  match Protocol_hash.Map.fold merge proposals None with
  | None -> None
  | Some ([proposal], _) -> Some proposal
  | Some _ -> None (* in case of a tie, lets do nothing. *)
@@ -248,9 +248,9 @@ let apply ctxt accept_failing_script block operations =

let empty_result =
  { Updater.applied = [];
-    refused = Operation_hash_map.empty;
+    refused = Operation_hash.Map.empty;
-    branch_refused = Operation_hash_map.empty;
+    branch_refused = Operation_hash.Map.empty;
-    branch_delayed = Operation_hash_map.empty;
+    branch_delayed = Operation_hash.Map.empty;
  }

let compare_operations op1 op2 =
@@ -276,9 +276,9 @@ let merge_result r r' =
    | Some x, None -> Some x
    | _, Some y -> Some y in
  { applied = r.applied @ r'.applied ;
-    refused = Operation_hash_map.merge merge r.refused r'.refused ;
+    refused = Operation_hash.Map.merge merge r.refused r'.refused ;
    branch_refused =
-      Operation_hash_map.merge merge r.branch_refused r'.branch_refused ;
+      Operation_hash.Map.merge merge r.branch_refused r'.branch_refused ;
    branch_delayed = r'.branch_delayed ;
  }

@@ -296,15 +296,15 @@ let prevalidate ctxt pred_block sort operations =
        match classify_errors errors with
        | `Branch ->
            let branch_refused =
-              Operation_hash_map.add op.hash errors r.Updater.branch_refused in
+              Operation_hash.Map.add op.hash errors r.Updater.branch_refused in
            Lwt.return (ctxt, { r with Updater.branch_refused })
        | `Permanent ->
            let refused =
-              Operation_hash_map.add op.hash errors r.Updater.refused in
+              Operation_hash.Map.add op.hash errors r.Updater.refused in
            Lwt.return (ctxt, { r with Updater.refused })
        | `Temporary ->
            let branch_delayed =
-              Operation_hash_map.add op.hash errors r.Updater.branch_delayed in
+              Operation_hash.Map.add op.hash errors r.Updater.branch_delayed in
            Lwt.return (ctxt, { r with Updater.branch_delayed }))
    (ctxt, empty_result)
    operations >>= fun (ctxt, r) ->
@@ -312,7 +312,7 @@ let prevalidate ctxt pred_block sort operations =
  | _ :: _ when sort ->
      let rechecked_operations =
        List.filter
-          (fun op -> Operation_hash_map.mem op.hash r.Updater.branch_delayed)
+          (fun op -> Operation_hash.Map.mem op.hash r.Updater.branch_delayed)
          operations in
      loop ctxt rechecked_operations >>=? fun (ctxt, r') ->
      return (ctxt, merge_result r r')
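The prevalidation hunks above all route an operation's errors into one of the three non-applied buckets according to the error class. A compressed sketch of that routing (the helper is illustrative only, not code from this commit):

  let add_error op_hash errors r =
    match classify_errors errors with
    | `Branch ->
        { r with Updater.branch_refused =
            Operation_hash.Map.add op_hash errors r.Updater.branch_refused }
    | `Permanent ->
        { r with Updater.refused =
            Operation_hash.Map.add op_hash errors r.Updater.refused }
    | `Temporary ->
        { r with Updater.branch_delayed =
            Operation_hash.Map.add op_hash errors r.Updater.branch_delayed }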
@@ -25,7 +25,7 @@ let state_hash_encoding =
  let open Data_encoding in
  conv
    State_hash.to_bytes
-    State_hash.of_bytes
+    State_hash.of_bytes_exn
    (Fixed.bytes Nonce_hash.size)

let seed_encoding =
@@ -565,7 +565,7 @@ module Helpers = struct
        ~description: "Forge a block header"
        ~input:
          (obj9
-            (req "net_id" Updater.net_id_encoding)
+            (req "net_id" Updater.Net_id.encoding)
            (req "predecessor" Block_hash.encoding)
            (req "timestamp" Timestamp.encoding)
            (req "fitness" Fitness.encoding)
@@ -489,7 +489,7 @@ module Rewards = struct
    Raw_make_iterable_data_storage(struct
      type t = Ed25519.Public_key_hash.t * Cycle_repr.t
      let prefix = Key.rewards
-      let length = Ed25519.Public_key_hash.path_len + 1
+      let length = Ed25519.Public_key_hash.path_length + 1
      let to_path (pkh, c) =
        Ed25519.Public_key_hash.to_path pkh @
        [Int32.to_string (Cycle_repr.to_int32 c)]
@@ -497,7 +497,7 @@ module Rewards = struct
        match List.rev p with
        | [] -> assert false
        | cycle :: rev_pkh ->
-            (Ed25519.Public_key_hash.of_path (List.rev rev_pkh),
+            (Ed25519.Public_key_hash.of_path_exn (List.rev rev_pkh),
            Cycle_repr.of_int32_exn @@ Int32.of_string cycle)
      let compare (pkh1, c1) (pkh2, c2) =
        let cmp1 = Ed25519.Public_key_hash.compare pkh1 pkh2 in
@@ -207,8 +206,9 @@ module Make_data_set_storage (P : Single_data_description) = struct
        let title = ("A " ^ P.name ^ "key")
        let size = None
      end)
+    let of_path = of_path_exn
    let prefix = P.key
-    let length = path_len
+    let length = path_length
  end

  module HashTbl =
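Renaming `path_len` to `path_length` and supplying the explicit `of_path_exn` variant changes how storage key modules are assembled, but the invariant they rely on is unchanged: a hash expands to a fixed number of path segments and can be rebuilt from them. A sketch of that invariant (hypothetical check, not part of the diff):

  let check_roundtrip pkh =
    let path = Ed25519.Public_key_hash.to_path pkh in
    (* every key expands to exactly path_length segments *)
    assert (List.length path = Ed25519.Public_key_hash.path_length) ;
    Ed25519.Public_key_hash.of_path_exn path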
@@ -349,13 +350,14 @@ end
module Make_iterable_data_storage (H: HASH) (P: Single_data_description) =
  Raw_make_iterable_data_storage(struct
    include H
+    let of_path = H.of_path_exn
    let prefix = P.key
-    let length = path_len
+    let length = path_length
  end)(P)

let register_resolvers (module H : Hash.HASH) prefixes =

-  let module Set = Hash_set(H) in
+  let module Set = H.Set in

  let resolvers =
    List.map
@@ -18,12 +18,8 @@ type t
type context = t

module Contract_hash = Tezos_hash.Contract_hash
-module Contract_hash_set = Tezos_hash.Contract_hash_set
-module Contract_hash_map = Tezos_hash.Contract_hash_map

module Nonce_hash = Tezos_hash.Nonce_hash
-module Nonce_hash_set = Tezos_hash.Nonce_hash_set
-module Nonce_hash_map = Tezos_hash.Nonce_hash_map

type public_key = Ed25519.public_key
type public_key_hash = Ed25519.Public_key_hash.t
@@ -392,7 +388,7 @@ module Vote : sig
    context -> Protocol_hash.t -> public_key_hash ->
    context tzresult Lwt.t
  val get_proposals:
-    context -> int32 Protocol_hash_map.t tzresult Lwt.t
+    context -> int32 Protocol_hash.Map.t tzresult Lwt.t
  val clear_proposals: context -> context tzresult Lwt.t

  val freeze_listings: context -> context tzresult Lwt.t
@@ -25,8 +25,6 @@ module State_hash = Hash.Make_Blake2B(Base58)(struct
    let b58check_prefix = Prefix.random_state_hash
    let size = None
  end)
-module State_hash_set = Hash_set(State_hash)
-module State_hash_map = Hash_map(State_hash)

module Nonce_hash = Hash.Make_Blake2B(Base58)(struct
    let name = "cycle_nonce"
@@ -34,8 +32,6 @@ module Nonce_hash = Hash.Make_Blake2B(Base58)(struct
    let b58check_prefix = Prefix.nonce_hash
    let size = None
  end)
-module Nonce_hash_set = Hash_set(Nonce_hash)
-module Nonce_hash_map = Hash_map(Nonce_hash)

module Script_expr_hash = Hash.Make_Blake2B(Base58)(struct
    let name = "script_expr"
@@ -43,8 +39,6 @@ module Script_expr_hash = Hash.Make_Blake2B(Base58)(struct
    let b58check_prefix = Prefix.script_expr_hash
    let size = None
  end)
-module Script_expr_hash_set = Hash_set(Script_expr_hash)
-module Script_expr_hash_map = Hash_map(Script_expr_hash)

module Contract_hash = Hash.Make_Blake2B(Base58)(struct
    let name = "Contract_hash"
@@ -52,8 +46,6 @@ module Contract_hash = Hash.Make_Blake2B(Base58)(struct
    let b58check_prefix = Prefix.contract_hash
    let size = Some 20
  end)
-module Contract_hash_set = Hash_set(Contract_hash)
-module Contract_hash_map = Hash_map(Contract_hash)

let () =
  Base58.check_encoded_prefix Contract_hash.b58check_encoding "TZ1" 36 ;
@@ -11,12 +11,12 @@ let record_proposal ctxt delegate proposal =
  Storage.Vote.Proposals.add ctxt (delegate, proposal)

let get_proposals ctxt =
-  Storage.Vote.Proposals.fold ctxt Protocol_hash_map.empty
+  Storage.Vote.Proposals.fold ctxt Protocol_hash.Map.empty
    ~f:(fun (proposal, _delegate) acc ->
      let previous =
-        try Protocol_hash_map.find proposal acc
+        try Protocol_hash.Map.find proposal acc
        with Not_found -> 0l in
-      Lwt.return (Protocol_hash_map.add proposal (Int32.succ previous) acc))
+      Lwt.return (Protocol_hash.Map.add proposal (Int32.succ previous) acc))

let clear_proposals ctxt =
  Storage.Vote.Proposals.clear ctxt
@@ -12,7 +12,7 @@ val record_proposal:
  Storage.t tzresult Lwt.t

val get_proposals:
-  Storage.t -> int32 Protocol_hash_map.t tzresult Lwt.t
+  Storage.t -> int32 Protocol_hash.Map.t tzresult Lwt.t

val clear_proposals: Storage.t -> Storage.t tzresult Lwt.t

@@ -56,9 +56,9 @@ let preapply context _block_pred _timestamp _sort operations =
      (Ok
        (context,
          { Updater.applied = List.map (fun h -> h) operations;
-            refused = Operation_hash_map.empty;
+            refused = Operation_hash.Map.empty;
-            branch_delayed = Operation_hash_map.empty;
+            branch_delayed = Operation_hash.Map.empty;
-            branch_refused = Operation_hash_map.empty;
+            branch_refused = Operation_hash.Map.empty;
          }))

let rpc_services = Services.rpc_services
@@ -21,18 +21,28 @@ module type MINIMAL_HASH = sig
  val size: int (* in bytes *)
  val compare: t -> t -> int
  val equal: t -> t -> bool
-  val of_hex: string -> t
  val to_hex: t -> string
-  val of_string: string -> t
+  val of_hex: string -> t option
+  val of_hex_exn: string -> t

  val to_string: t -> string
+  val of_string: string -> t option
+  val of_string_exn: string -> t

  val to_bytes: t -> MBytes.t
-  val of_bytes: MBytes.t -> t
+  val of_bytes: MBytes.t -> t option
+  val of_bytes_exn: MBytes.t -> t

  val read: MBytes.t -> int -> t
  val write: MBytes.t -> int -> t -> unit

  val to_path: t -> string list
-  val of_path: string list -> t
+  val of_path: string list -> t option
+  val of_path_exn: string list -> t

  val prefix_path: string -> string list
-  val path_len: int
+  val path_length: int

end

@@ -49,6 +59,16 @@ module type HASH = sig
  type Base58.data += Hash of t
  val b58check_encoding: t Base58.encoding

+  module Set : sig
+    include Set.S with type elt = t
+    val encoding: t Data_encoding.t
+  end
+
+  module Map : sig
+    include Map.S with type key = t
+    val encoding: 'a Data_encoding.t -> 'a t Data_encoding.t
+  end

end

(** {2 Building Hashes} *******************************************************)
@@ -83,31 +103,13 @@ module Make_Blake2B
  end)
  (Name : PrefixedName) : HASH

-(** Builds a Set of values of some Hash type. *)
-module Hash_set (Hash : HASH) : sig
-  include Set.S with type elt = Hash.t
-  val encoding: t Data_encoding.t
-end
-
-(** Builds a Map using some Hash type as keys. *)
-module Hash_map (Hash : HASH) : sig
-  include Map.S with type key = Hash.t
-  val encoding: 'a Data_encoding.t -> 'a t Data_encoding.t
-end

(** {2 Predefined Hashes } ****************************************************)

(** Blocks hashes / IDs. *)
module Block_hash : HASH
-module Block_hash_set : Set.S with type elt = Block_hash.t
-module Block_hash_map : module type of Hash_map (Block_hash)

(** Operations hashes / IDs. *)
module Operation_hash : HASH
-module Operation_hash_set : Set.S with type elt = Operation_hash.t
-module Operation_hash_map : module type of Hash_map (Operation_hash)

(** Protocol versions / source hashes. *)
module Protocol_hash : HASH
-module Protocol_hash_set : Set.S with type elt = Protocol_hash.t
-module Protocol_hash_map : module type of Hash_map (Protocol_hash)
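The pattern repeated through this interface is that every parsing function (`of_hex`, `of_string`, `of_bytes`, `of_path`) now returns an option, with `*_exn` variants that raise on malformed input. A sketch of the intended calling convention (the wrapper is hypothetical, not from this commit):

  let parse_block_hash bytes =
    match Block_hash.of_bytes bytes with
    | Some h -> Ok h
    | None -> Error "not a valid block hash (wrong length)"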
@@ -19,7 +19,6 @@ module type STORE = sig
  val del: t -> key -> t Lwt.t
  val list: t -> key list -> key list Lwt.t
  val remove_rec: t -> key -> t Lwt.t
-  val keys: t -> key list Lwt.t
end

(** Projection of OCaml keys of some abstract type to concrete storage
@@ -59,8 +58,6 @@ module type BYTES_STORE = sig
  val del: t -> key -> t Lwt.t
  val list: t -> key list -> key list Lwt.t
  val remove_rec: t -> key -> t Lwt.t

-  val keys: t -> key list Lwt.t
end

module MakeBytesStore (S : STORE) (K : KEY) :
@@ -77,8 +74,6 @@ module type TYPED_STORE = sig
  val get: t -> key -> value option Lwt.t
  val set: t -> key -> value -> t Lwt.t
  val del: t -> key -> t Lwt.t

-  val keys: t -> key list Lwt.t
end

(** Gives a typed view of a store (values of a given type stored under
@@ -2,11 +2,13 @@

open Hash

-type net_id
-val net_id_encoding: net_id Data_encoding.t
+module Net_id : sig
+  type t
+  val encoding : t Data_encoding.t
+end

type shell_operation = {
-  net_id: net_id ;
+  net_id: Net_id.t ;
}
val shell_operation_encoding: shell_operation Data_encoding.t

@@ -18,7 +20,7 @@ type raw_operation = {

(** The version agnostic toplevel structure of blocks. *)
type shell_block = {
-  net_id: net_id ;
+  net_id: Net_id.t ;
  (** The genesis of the chain this block belongs to. *)
  predecessor: Block_hash.t ;
  (** The preceding block in the chain. *)
@@ -43,14 +45,14 @@ type raw_block = {
type 'error preapply_result =
  { applied: Operation_hash.t list;
    (** Operations that where successfully applied. *)
-    refused: 'error list Operation_hash_map.t;
+    refused: 'error list Operation_hash.Map.t;
    (** Operations which triggered a context independent, unavoidable
        error (e.g. invalid signature). *)
-    branch_refused: 'error list Operation_hash_map.t;
+    branch_refused: 'error list Operation_hash.Map.t;
    (** Operations which triggered an error that might not arise in a
        different context (e.g. past account counter, insufficent
        balance). *)
-    branch_delayed: 'error list Operation_hash_map.t;
+    branch_delayed: 'error list Operation_hash.Map.t;
    (** Operations which triggered an error that might not arise in a
        future update of this context (e.g. futur account counter). *) }

@@ -132,7 +134,7 @@ type component = {

(** Takes a version hash, a list of OCaml components in compilation
    order. The last element must be named [protocol] and respect the
-    [protocol.mli] interface. Tries to compile it and returns true
+    [protocol.ml] interface. Tries to compile it and returns true
    if the operation was successful. *)
val compile : Protocol_hash.t -> component list -> bool Lwt.t

118 src/utils/IO.ml
@@ -1,14 +1,6 @@
-(**************************************************************************)
-(* *)
-(* Copyright (c) 2014 - 2016. *)
-(* Dynamic Ledger Solutions, Inc. <contact@tezos.com> *)
-(* *)
-(* All rights reserved. No warranty, explicit or implicit, provided. *)
-(* *)
-(**************************************************************************)
+(* For this source file only.

-(*
 * Copyright (c) 2013-2014 Thomas Gazagnaire <thomas@gazagnaire.org>
+ * Copyright (c) 2016 Dynamic Ledger Solutions, Inc. <contact@tezos.com>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
@@ -23,17 +15,7 @@
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 *)

-let (>>=) = Lwt.(>>=)
-let (>|=) = Lwt.(>|=)
-let (//) = Filename.concat
+open Error_monad

-exception Error of string

-let error =
-  Printf.ksprintf
-    (fun str ->
-      Printf.eprintf "fatal: %s\n%!" str;
-      Lwt.fail (Error str))

let mkdir dir =
  let safe_mkdir dir =
@@ -49,12 +31,12 @@ let mkdir dir =

let check_dir root =
  if Sys.file_exists root && not (Sys.is_directory root) then
-    error "%s is not a directory!" root
+    failwith "%s is not a directory!" root
  else begin
    let mkdir dir =
      if not (Sys.file_exists dir) then mkdir dir in
    mkdir root;
-    Lwt.return_unit
+    return ()
  end

let files = Lwt_pool.create 50 (fun () -> Lwt.return_unit)
@@ -90,7 +72,8 @@ let with_file_out file ba =
  mkdir (Filename.dirname file);
  with_file
    (fun () ->
-      Lwt_unix.(openfile file [O_RDWR; O_NONBLOCK; O_CREAT] 0o644) >>= fun fd ->
+      Lwt_unix.(openfile file
+                  [O_RDWR; O_NONBLOCK; O_CREAT] 0o644) >>= fun fd ->
      try
        write_bigstring fd ba >>= fun r ->
        Lwt_unix.close fd >>= fun () ->
@@ -99,58 +82,51 @@ let with_file_out file ba =
        Lwt_unix.close fd >>= fun () ->
        Lwt.fail e)

-let remove_file file =
-  if Sys.file_exists file then Unix.unlink file;
-  Lwt.return_unit

let is_directory f =
  try Sys.is_directory f with _ -> false

-let list_files root =
-  let files = Lwt_unix.files_of_directory root in
-  Lwt_stream.fold_s
-    (fun file accu ->
-      if file = "." || file = ".." then
-        Lwt.return accu
-      else
-        Lwt.return (file :: accu))
-    files [] >>= fun l ->
-  Lwt.return (List.sort compare l)
+let is_empty dir =
+  Lwt_unix.opendir dir >>= fun hdir ->
+  Lwt_unix.readdir_n hdir 3 >>= fun files ->
+  let res = Array.length files = 2 in
+  Lwt_unix.closedir hdir >>= fun () ->
+  Lwt.return res

-let rec_files root =
-  let rec aux accu dir =
-    let files = Lwt_unix.files_of_directory (root // dir) in
+let rec cleanup_dir dir =
+  Lwt_unix.file_exists dir >>= function
+  | true ->
+      is_empty dir >>= fun empty ->
+      if empty && dir <> "/" then begin
+        Lwt_unix.rmdir dir >>= fun () ->
+        cleanup_dir (Filename.dirname dir)
+      end else
+        Lwt.return_unit
+  | false ->
+      Lwt.return_unit

+let remove_file ?(cleanup = false) file =
+  Lwt_unix.file_exists file >>= function
+  | true ->
+      Lwt_unix.unlink file >>= fun () ->
+      if cleanup then
+        Lwt.catch
+          (fun () -> cleanup_dir (Filename.dirname file))
+          (fun _ -> Lwt.return_unit)
+      else
+        Lwt.return_unit
+  | false ->
+      Lwt.return_unit

+let fold root ~init ~f =
+  if is_directory root then begin
+    let files = Lwt_unix.files_of_directory root in
    Lwt_stream.fold_s
-      (fun file accu ->
+      (fun file acc ->
        if file = "." || file = ".." then
-          Lwt.return accu
+          Lwt.return acc
        else
-          let file = if dir = "" then file else dir // file in
-          if is_directory (root // file) then
-            aux accu file
-          else
-            Lwt.return (file :: accu))
-      files accu in
-  aux [] ""
+          f file acc)
+      files init
+  end else
+    Lwt.return init

-let remove_rec root =
-  let rec aux dir =
-    let files = Lwt_unix.files_of_directory (root // dir) in
-    Lwt_stream.iter_s
-      (fun file ->
-        if file = "." || file = ".." then
-          Lwt.return_unit
-        else
-          let file = if dir = "" then file else dir // file in
-          if is_directory (root // file) then begin
-            aux file >>= fun () ->
-            Lwt.return_unit
-          end else begin
-            Unix.unlink (root // file) ;
-            Lwt.return_unit
-          end)
-      files >>= fun () ->
-    Unix.rmdir (root // dir) ;
-    Lwt.return_unit
-  in
-  if Sys.file_exists root then aux "" else Lwt.return_unit
@@ -7,28 +7,17 @@
(* *)
(**************************************************************************)

-(*
- * Copyright (c) 2013-2014 Thomas Gazagnaire <thomas@gazagnaire.org>
- *
- * Permission to use, copy, modify, and distribute this software for any
- * purpose with or without fee is hereby granted, provided that the above
- * copyright notice and this permission notice appear in all copies.
- *
- * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
- * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
- * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
- * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
- * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
- * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
- *)
+open Error_monad

-open Utils
+val mkdir: string -> unit

+val check_dir: string -> unit tzresult Lwt.t
+val is_directory: string -> bool

-val check_dir: string -> unit Lwt.t
val with_file_in: string -> (MBytes.t -> 'a Lwt.t) -> 'a Lwt.t
-val list_files: string -> string list Lwt.t
-val rec_files: string -> string list Lwt.t
val with_file_out: string -> MBytes.t -> unit Lwt.t
-val remove_file: string -> unit Lwt.t
-val remove_rec: string -> unit Lwt.t
+val remove_file: ?cleanup:bool -> string -> unit Lwt.t

+val fold: string -> init:'a -> f:(string -> 'a -> 'a Lwt.t) -> 'a Lwt.t

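The interface now exposes a single `fold` over directory entries plus a `remove_file` that can optionally prune empty parent directories, instead of `list_files` / `rec_files` / `remove_rec`. A sketch of how callers might use the new entry points (hypothetical helpers, not from this commit):

  let count_entries dir =
    IO.fold dir ~init:0 ~f:(fun _file n -> Lwt.return (n + 1))

  let delete_and_prune file =
    IO.remove_file ~cleanup:true file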
@@ -21,7 +21,7 @@ val make_target : float -> target

type secret_key
type public_key
-module Public_key_hash : Hash.HASH
+module Public_key_hash : Hash.INTERNAL_HASH
type channel_key

val public_key_encoding : public_key Data_encoding.t
@@ -30,9 +30,8 @@ let from_stream (stream: string Lwt_stream.t) =
        let json = Ezjsonm.from_string !buffer in
        buffer := "" ;
        Some (Ok json)
-      with Ezjsonm.Parse_error (_, msg) ->
+      with Ezjsonm.Parse_error _ ->
-        if String.length str = 32 * 1024 then None
-        else Some (Error msg))
+        None)
    stream

let write_file file json =
@@ -38,19 +38,34 @@ module type MINIMAL_HASH = sig
  val size: int (* in bytes *)
  val compare: t -> t -> int
  val equal: t -> t -> bool
-  val of_hex: string -> t
  val to_hex: t -> string
-  val of_string: string -> t
+  val of_hex: string -> t option
+  val of_hex_exn: string -> t

  val to_string: t -> string
+  val of_string: string -> t option
+  val of_string_exn: string -> t

  val to_bytes: t -> MBytes.t
-  val of_bytes: MBytes.t -> t
+  val of_bytes: MBytes.t -> t option
+  val of_bytes_exn: MBytes.t -> t

  val read: MBytes.t -> int -> t
  val write: MBytes.t -> int -> t -> unit
-  val to_path: t -> string list
-  val of_path: string list -> t
-  val prefix_path: string -> string list
-  val path_len: int

+  val to_path: t -> string list
+  val of_path: string list -> t option
+  val of_path_exn: string list -> t
+
+  val prefix_path: string -> string list
+  val path_length: int
+
+end
+
+module type INTERNAL_MINIMAL_HASH = sig
+  include MINIMAL_HASH
+  module Table : Hashtbl.S with type key = t
end

module type HASH = sig
@@ -66,6 +81,21 @@ module type HASH = sig
  type Base58.data += Hash of t
  val b58check_encoding: t Base58.encoding

+  module Set : sig
+    include Set.S with type elt = t
+    val encoding: t Data_encoding.t
+  end
+
+  module Map : sig
+    include Map.S with type key = t
+    val encoding: 'a Data_encoding.t -> 'a t Data_encoding.t
+  end
+
+end
+
+module type INTERNAL_HASH = sig
+  include HASH
+  module Table : Hashtbl.S with type key = t
end

module type Name = sig
@@ -93,32 +123,43 @@ module Make_minimal_Blake2B (K : Name) = struct
  | Some x -> x

  let of_string s =
-    if String.length s <> size then begin
-      let msg =
-        Printf.sprintf "%s.of_string: wrong string size (%d)"
-          K.name (String.length s) in
-      raise (Invalid_argument msg)
-    end ;
-    Sodium.Generichash.Bytes.to_hash (Bytes.of_string s)
+    if String.length s <> size then
+      None
+    else
+      Some (Sodium.Generichash.Bytes.to_hash (Bytes.of_string s))
+  let of_string_exn s =
+    match of_string s with
+    | None ->
+        let msg =
+          Printf.sprintf "%s.of_string: wrong string size (%d)"
+            K.name (String.length s) in
+        raise (Invalid_argument msg)
+    | Some h -> h
  let to_string s = Bytes.to_string (Sodium.Generichash.Bytes.of_hash s)

  let of_hex s = of_string (Hex_encode.hex_decode s)
+  let of_hex_exn s = of_string_exn (Hex_encode.hex_decode s)
  let to_hex s = Hex_encode.hex_encode (to_string s)

  let compare = Sodium.Generichash.compare
  let equal x y = compare x y = 0

  let of_bytes b =
-    if MBytes.length b <> size then begin
-      let msg =
-        Printf.sprintf "%s.of_bytes: wrong string size (%d)"
-          K.name (MBytes.length b) in
-      raise (Invalid_argument msg)
-    end ;
-    Sodium.Generichash.Bigbytes.to_hash b
+    if MBytes.length b <> size then
+      None
+    else
+      Some (Sodium.Generichash.Bigbytes.to_hash b)
+  let of_bytes_exn b =
+    match of_bytes b with
+    | None ->
+        let msg =
+          Printf.sprintf "%s.of_bytes: wrong string size (%d)"
+            K.name (MBytes.length b) in
+        raise (Invalid_argument msg)
+    | Some h -> h
  let to_bytes = Sodium.Generichash.Bigbytes.of_hash

-  let read src off = of_bytes @@ MBytes.sub src off size
+  let read src off = of_bytes_exn @@ MBytes.sub src off size
  let write dst off h = MBytes.blit (to_bytes h) 0 dst off size

  let hash_bytes l =
@@ -135,8 +176,6 @@ module Make_minimal_Blake2B (K : Name) = struct
    l ;
    final state

-  module Set = Set.Make(struct type nonrec t = t let compare = compare end)

  let fold_read f buf off len init =
    let last = off + len * size in
    if last > MBytes.length buf then
@@ -150,19 +189,7 @@ module Make_minimal_Blake2B (K : Name) = struct
    in
    loop init off

-  module Map = Map.Make(struct type nonrec t = t let compare = compare end)
-  module Table =
-    Hashtbl.Make(struct
-      type nonrec t = t
-      let hash s =
-        Int64.to_int
-          (EndianString.BigEndian.get_int64
-             (Bytes.unsafe_to_string (Sodium.Generichash.Bytes.of_hash s))
-             0)
-      let equal = equal
-    end)
-
-  let path_len = 6
+  let path_length = 6
  let to_path key =
    let key = to_hex key in
    [ String.sub key 0 2 ; String.sub key 2 2 ;
@@ -171,6 +198,9 @@ module Make_minimal_Blake2B (K : Name) = struct
  let of_path path =
    let path = String.concat "" path in
    of_hex path
+  let of_path_exn path =
+    let path = String.concat "" path in
+    of_hex_exn path

  let prefix_path p =
    let p = Hex_encode.hex_encode p in
@@ -183,6 +213,18 @@ module Make_minimal_Blake2B (K : Name) = struct
    and p6 = if len > 10 then String.sub p 10 (len - 10) else "" in
    [ p1 ; p2 ; p3 ; p4 ; p5 ; p6 ]

+  module Table = struct
+    include Hashtbl.Make(struct
+        type nonrec t = t
+        let hash s =
+          Int64.to_int
+            (EndianString.BigEndian.get_int64
+               (Bytes.unsafe_to_string (Sodium.Generichash.Bytes.of_hash s))
+               0)
+        let equal = equal
+      end)
+  end

end

module Make_Blake2B (R : sig
@@ -206,7 +248,7 @@ module Make_Blake2B (R : sig
      ~prefix: K.b58check_prefix
      ~length:size
      ~wrap: (fun s -> Hash s)
-      ~of_raw:(fun h -> Some (of_string h)) ~to_raw:to_string
+      ~of_raw:(fun h -> of_string h) ~to_raw:to_string

  let of_b58check s =
    match Base58.simple_decode b58check_encoding s with
@@ -221,7 +263,7 @@ module Make_Blake2B (R : sig
    let open Data_encoding in
    splitted
      ~binary:
-        (conv to_bytes of_bytes (Fixed.bytes size))
+        (conv to_bytes of_bytes_exn (Fixed.bytes size))
      ~json:
        (describe ~title: (K.title ^ " (Base58Check-encoded Sha256)") @@
         conv to_b58check (Data_encoding.Json.wrap_error of_b58check) string)
@@ -235,6 +277,24 @@ module Make_Blake2B (R : sig
  let pp_short ppf t =
    Format.pp_print_string ppf (to_short_b58check t)

+  module Set = struct
+    include Set.Make(struct type nonrec t = t let compare = compare end)
+    let encoding =
+      Data_encoding.conv
+        elements
+        (fun l -> List.fold_left (fun m x -> add x m) empty l)
+        Data_encoding.(list encoding)
+  end
+
+  module Map = struct
+    include Map.Make(struct type nonrec t = t let compare = compare end)
+    let encoding arg_encoding =
+      Data_encoding.conv
+        bindings
+        (fun l -> List.fold_left (fun m (k,v) -> add k v m) empty l)
+        Data_encoding.(list (tup2 encoding arg_encoding))
+  end

end

(*-- Hash sets and maps -----------------------------------------------------*)
@@ -278,10 +338,6 @@ module Block_hash =
    let size = None
  end)

-module Block_hash_set = Hash_set (Block_hash)
-module Block_hash_map = Hash_map (Block_hash)
-module Block_hash_table = Hash_table (Block_hash)

module Operation_hash =
  Make_Blake2B (Base58) (struct
    let name = "Operation_hash"
@@ -290,10 +346,6 @@ module Operation_hash =
    let size = None
  end)

-module Operation_hash_set = Hash_set (Operation_hash)
-module Operation_hash_map = Hash_map (Operation_hash)
-module Operation_hash_table = Hash_table (Operation_hash)

module Protocol_hash =
  Make_Blake2B (Base58) (struct
    let name = "Protocol_hash"
@@ -302,10 +354,6 @@ module Protocol_hash =
    let size = None
  end)

-module Protocol_hash_set = Hash_set (Protocol_hash)
-module Protocol_hash_map = Hash_map (Protocol_hash)
-module Protocol_hash_table = Hash_table (Protocol_hash)

module Generic_hash =
  Make_minimal_Blake2B (struct
    let name = "Generic_hash"
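Each hash module produced by `Make_Blake2B` now ships its own `Set` and `Map` with `Data_encoding` combinators, which is what lets the higher layers drop the `Hash_set` / `Hash_map` functor instances removed above. A round-trip sketch, assuming the usual `Data_encoding.Binary` helpers (the wrapper is hypothetical):

  let roundtrip_block_set (s : Block_hash.Set.t) =
    let bytes = Data_encoding.Binary.to_bytes Block_hash.Set.encoding s in
    Data_encoding.Binary.of_bytes Block_hash.Set.encoding bytes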
@@ -30,19 +30,34 @@ module type MINIMAL_HASH = sig
  val size: int (* in bytes *)
  val compare: t -> t -> int
  val equal: t -> t -> bool
-  val of_hex: string -> t
  val to_hex: t -> string
-  val of_string: string -> t
+  val of_hex: string -> t option
+  val of_hex_exn: string -> t

  val to_string: t -> string
+  val of_string: string -> t option
+  val of_string_exn: string -> t

  val to_bytes: t -> MBytes.t
-  val of_bytes: MBytes.t -> t
+  val of_bytes: MBytes.t -> t option
+  val of_bytes_exn: MBytes.t -> t

  val read: MBytes.t -> int -> t
  val write: MBytes.t -> int -> t -> unit
-  val to_path: t -> string list
-  val of_path: string list -> t
-  val prefix_path: string -> string list
-  val path_len: int

+  val to_path: t -> string list
+  val of_path: string list -> t option
+  val of_path_exn: string list -> t
+
+  val prefix_path: string -> string list
+  val path_length: int
+
+end
+
+module type INTERNAL_MINIMAL_HASH = sig
+  include MINIMAL_HASH
+  module Table : Hashtbl.S with type key = t
end

module type HASH = sig
@@ -58,6 +73,21 @@ module type HASH = sig
  type Base58.data += Hash of t
  val b58check_encoding: t Base58.encoding

+  module Set : sig
+    include Set.S with type elt = t
+    val encoding: t Data_encoding.t
+  end
+
+  module Map : sig
+    include Map.S with type key = t
+    val encoding: 'a Data_encoding.t -> 'a t Data_encoding.t
+  end
+
+end
+
+module type INTERNAL_HASH = sig
+  include HASH
+  module Table : Hashtbl.S with type key = t
end

(** {2 Building Hashes} *******************************************************)
@@ -78,7 +108,7 @@ module type PrefixedName = sig
end

(** Builds a new Hash type using Sha256. *)
-module Make_minimal_Blake2B (Name : Name) : MINIMAL_HASH
+module Make_minimal_Blake2B (Name : Name) : INTERNAL_MINIMAL_HASH
module Make_Blake2B
  (Register : sig
    val register_encoding:
@@ -89,28 +119,13 @@ module Make_Blake2B
      wrap: ('a -> Base58.data) ->
      'a Base58.encoding
  end)
-  (Name : PrefixedName) : HASH
+  (Name : PrefixedName) : INTERNAL_HASH

-(** Builds a Set of values of some Hash type. *)
-module Hash_set (Hash : HASH) : sig
-  include Set.S with type elt = Hash.t
-  val encoding: t Data_encoding.t
-end
-
-(** Builds a Map using some Hash type as keys. *)
-module Hash_map (Hash : HASH) : sig
-  include Map.S with type key = Hash.t
-  val encoding: 'a Data_encoding.t -> 'a t Data_encoding.t
-end
-
-(** Builds a Hashtbl using some Hash type as keys. *)
-module Hash_table (Hash : MINIMAL_HASH) : Hashtbl.S with type key = Hash.t

(** {2 Predefined Hashes } ****************************************************)

(** Blocks hashes / IDs. *)
module Block_hash : sig
-  include HASH
+  include INTERNAL_HASH
  val param :
    ?name:string ->
    ?desc:string ->
@@ -118,20 +133,10 @@ module Block_hash : sig
    (t -> 'a, 'arg, 'ret) Cli_entries.params
end

-module Block_hash_set : module type of Hash_set (Block_hash)
-module Block_hash_map : module type of Hash_map (Block_hash)
-module Block_hash_table : module type of Hash_table (Block_hash)

(** Operations hashes / IDs. *)
-module Operation_hash : HASH
+module Operation_hash : INTERNAL_HASH
-module Operation_hash_set : Set.S with type elt = Operation_hash.t
-module Operation_hash_map : module type of Hash_map (Operation_hash)
-module Operation_hash_table : module type of Hash_table (Operation_hash)

(** Protocol versions / source hashes. *)
-module Protocol_hash : HASH
+module Protocol_hash : INTERNAL_HASH
-module Protocol_hash_set : module type of Hash_set (Protocol_hash)
-module Protocol_hash_map : module type of Hash_map (Protocol_hash)
-module Protocol_hash_table : module type of Hash_table (Protocol_hash)

-module Generic_hash : MINIMAL_HASH
+module Generic_hash : INTERNAL_MINIMAL_HASH
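`INTERNAL_HASH` extends the exported `HASH` signature with a `Table` submodule, so node-internal code gets a hashtable keyed by the hash without the removed `Hash_table` functor. A small sketch of the intended use (the cache below is hypothetical):

  let block_cache : string Block_hash.Table.t = Block_hash.Table.create 17
  let remember hash data = Block_hash.Table.replace block_cache hash data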
@@ -55,23 +55,35 @@ let equal_error_monad ?msg exn1 exn2 =
    | Error_monad.Unclassified err -> err in
  Assert.equal ?msg ~prn exn1 exn2

+let equal_block_set ?msg set1 set2 =
+  let msg = format_msg msg in
+  let b1 = Block_hash.Set.elements set1
+  and b2 = Block_hash.Set.elements set2 in
+  Assert.make_equal_list ?msg
+    (fun h1 h2 -> Block_hash.equal h1 h2)
+    Block_hash.to_string
+    b1 b2

let equal_block_map ?msg ~eq map1 map2 =
  let msg = format_msg msg in
-  let open Hash in
-  let module BlockMap = Hash_map(Block_hash) in
-  Assert.equal ?msg ~eq map1 map2
+  let b1 = Block_hash.Map.bindings map1
+  and b2 = Block_hash.Map.bindings map2 in
+  Assert.make_equal_list ?msg
+    (fun (h1, b1) (h2, b2) -> Block_hash.equal h1 h2 && eq b1 b2)
+    (fun (h1, _) -> Block_hash.to_string h1)
+    b1 b2

let equal_operation ?msg op1 op2 =
  let msg = format_msg msg in
  let eq op1 op2 =
    match op1, op2 with
    | None, None -> true
-    | Some (h1, op1), Some (h2, op2) ->
-        Hash.Operation_hash.equal h1 h2 && op1 = op2
+    | Some op1, Some op2 ->
+        Store.Operation.equal op1 op2
    | _ -> false in
  let prn = function
    | None -> "none"
-    | Some (h, op) -> Hash.Operation_hash.to_hex h in
+    | Some op -> Hash.Operation_hash.to_hex (Store.Operation.hash op) in
  Assert.equal ?msg ~prn ~eq op1 op2

let equal_block ?msg st1 st2 =
@@ -79,12 +91,12 @@ let equal_block ?msg st1 st2 =
  let eq st1 st2 =
    match st1, st2 with
    | None, None -> true
-    | Some (h1, st1), Some (h2, st2) ->
-        Hash.Block_hash.equal h1 h2 && st1 = st2
+    | Some st1, Some st2 -> Store.Block_header.equal st1 st2
    | _ -> false in
  let prn = function
    | None -> "none"
-    | Some (h, st) -> Hash.Block_hash.to_hex h in
+    | Some st ->
+        Hash.Block_hash.to_hex (Store.Block_header.hash st) in
  Assert.equal ?msg ~prn ~eq st1 st2

let equal_result ?msg r1 r2 ~equal_ok ~equal_err =
@@ -32,18 +32,23 @@ val equal_string_option : ?msg:string -> string option -> string option -> unit
val equal_error_monad :
  ?msg:string -> Error_monad.error -> Error_monad.error -> unit

-val equal_block_map : ?msg:string -> eq:('a -> 'a -> bool) -> 'a -> 'a -> unit
+val equal_block_set :
+  ?msg:string -> Block_hash.Set.t -> Block_hash.Set.t -> unit
+
+val equal_block_map :
+  ?msg:string -> eq:('a -> 'a -> bool) ->
+  'a Block_hash.Map.t -> 'a Block_hash.Map.t -> unit

val equal_operation :
  ?msg:string ->
-  (Operation_hash.t * State.Operation.operation) option ->
-  (Operation_hash.t * State.Operation.operation) option ->
+  State.Operation.t option ->
+  State.Operation.t option ->
  unit

val equal_block :
  ?msg:string ->
-  (Block_hash.t * Store.block) option ->
-  (Block_hash.t * Store.block) option ->
+  Store.Block_header.t option ->
+  Store.Block_header.t option ->
  unit

val equal_result :
|
@ -15,12 +15,14 @@ let make_test ~title test =
   Test.add_simple_test ~title (fun () -> Lwt_main.run (test ()))

 let rec remove_dir dir =
-  Array.iter (fun file ->
-      let f = Filename.concat dir file in
-      if Sys.is_directory f then remove_dir f
-      else Sys.remove f)
-    (Sys.readdir dir);
-  Unix.rmdir dir
+  if Sys.file_exists dir then begin
+    Array.iter (fun file ->
+        let f = Filename.concat dir file in
+        if Sys.is_directory f then remove_dir f
+        else Sys.remove f)
+      (Sys.readdir dir);
+    Unix.rmdir dir
+  end

 let output name res =
   let open Kaputt in
@ -104,7 +106,7 @@ let run prefix tests =
     (fun () ->
        let finalise () =
          if keep_dir then
-           Format.eprintf "Data saved kept "
+           Format.eprintf "Kept data dir %s@." base_dir
         else
           remove_dir base_dir
       in
@ -27,21 +27,23 @@ let genesis_protocol =
 let genesis_time =
   Time.of_seconds 0L

-let genesis = {
-  Store.time = genesis_time ;
+let genesis : State.Net.genesis = {
+  time = genesis_time ;
   block = genesis_block ;
   protocol = genesis_protocol ;
 }

+let net_id = State.Net_id.Id genesis_block
+
 (** Context creation *)

 let block2 =
-  Block_hash.of_hex
+  Block_hash.of_hex_exn
     "2222222222222222222222222222222222222222222222222222222222222222"

-let faked_block : Store.block = {
+let faked_block : Store.Block_header.t = {
   shell = {
-    net_id = Net genesis_block ;
+    net_id ;
     predecessor = genesis_block ;
     operations = [] ;
     fitness = [] ;
@ -52,52 +54,55 @@ let faked_block : Store.block = {

 let create_block2 idx =
   checkout idx genesis_block >>= function
-  | None | Some (Error _) ->
+  | None ->
       Assert.fail_msg "checkout genesis_block"
-  | Some (Ok ctxt) ->
+  | Some ctxt ->
       set ctxt ["a"; "b"] (MBytes.of_string "Novembre") >>= fun ctxt ->
       set ctxt ["a"; "c"] (MBytes.of_string "Juin") >>= fun ctxt ->
       set ctxt ["version";] (MBytes.of_string "0.0") >>= fun ctxt ->
-      commit idx faked_block block2 ctxt
+      commit faked_block block2 ctxt

 let block3a =
-  Block_hash.of_hex
+  Block_hash.of_hex_exn
     "3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a"

 let create_block3a idx =
   checkout idx block2 >>= function
-  | None | Some (Error _) ->
+  | None ->
       Assert.fail_msg "checkout block2"
-  | Some (Ok ctxt) ->
+  | Some ctxt ->
       del ctxt ["a"; "b"] >>= fun ctxt ->
       set ctxt ["a"; "d"] (MBytes.of_string "Mars") >>= fun ctxt ->
-      commit idx faked_block block3a ctxt
+      commit faked_block block3a ctxt

 let block3b =
-  Block_hash.of_hex
+  Block_hash.of_hex_exn
     "3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b"

 let block3c =
-  Block_hash.of_hex
+  Block_hash.of_hex_exn
     "3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c"

 let create_block3b idx =
   checkout idx block2 >>= function
-  | None | Some (Error _) ->
+  | None ->
       Assert.fail_msg "checkout block3b"
-  | Some (Ok ctxt) ->
+  | Some ctxt ->
       del ctxt ["a"; "c"] >>= fun ctxt ->
       set ctxt ["a"; "d"] (MBytes.of_string "Février") >>= fun ctxt ->
-      commit idx faked_block block3b ctxt
+      commit faked_block block3b ctxt

 let wrap_context_init f base_dir =
   let root = base_dir // "context" in
   Context.init root >>= fun idx ->
-  Context.create_genesis_context idx genesis genesis_protocol >>= fun _ ->
+  Context.commit_genesis idx
+    ~id:genesis.block
+    ~time:genesis.time
+    ~protocol:genesis.protocol
+    ~test_protocol:genesis.protocol >>= fun _ ->
   create_block2 idx >>= fun () ->
   create_block3a idx >>= fun () ->
   create_block3b idx >>= fun () ->
-  commit_invalid idx faked_block block3c [Error_monad.Unclassified "TEST"] >>= fun () ->
   f idx

 (** Simple test *)
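Note: with the Context API used above, checkout now yields None or Some ctxt directly (no error layer), and commit no longer takes the index. A minimal illustrative sketch of an extra block-building helper under that assumption — the helper itself is hypothetical and not part of this commit, it only reuses the names defined above:

  let create_block4 idx =
    (* hypothetical helper: same shape as create_block3b above *)
    checkout idx block2 >>= function
    | None -> Assert.fail_msg "checkout block2"
    | Some ctxt ->
        set ctxt ["a"; "e"] (MBytes.of_string "Avril") >>= fun ctxt ->
        commit faked_block block3c ctxt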
@ -108,9 +113,9 @@ let c = function

 let test_simple idx =
   checkout idx block2 >>= function
-  | None | Some (Error _) ->
+  | None ->
       Assert.fail_msg "checkout block2"
-  | Some (Ok ctxt) ->
+  | Some ctxt ->
       get ctxt ["version"] >>= fun version ->
       Assert.equal_string_option ~msg:__LOC__ (c version) (Some "0.0") ;
       get ctxt ["a";"b"] >>= fun novembre ->
|
|||||||
|
|
||||||
let test_continuation idx =
|
let test_continuation idx =
|
||||||
checkout idx block3a >>= function
|
checkout idx block3a >>= function
|
||||||
| None | Some (Error _) ->
|
| None ->
|
||||||
Assert.fail_msg "checkout block3a"
|
Assert.fail_msg "checkout block3a"
|
||||||
| Some (Ok ctxt) ->
|
| Some ctxt ->
|
||||||
get ctxt ["version"] >>= fun version ->
|
get ctxt ["version"] >>= fun version ->
|
||||||
Assert.equal_string_option ~msg:__LOC__ (Some "0.0") (c version) ;
|
Assert.equal_string_option ~msg:__LOC__ (Some "0.0") (c version) ;
|
||||||
get ctxt ["a";"b"] >>= fun novembre ->
|
get ctxt ["a";"b"] >>= fun novembre ->
|
||||||
@ -136,9 +141,9 @@ let test_continuation idx =
|
|||||||
|
|
||||||
let test_fork idx =
|
let test_fork idx =
|
||||||
checkout idx block3b >>= function
|
checkout idx block3b >>= function
|
||||||
| None | Some (Error _) ->
|
| None ->
|
||||||
Assert.fail_msg "checkout block3b"
|
Assert.fail_msg "checkout block3b"
|
||||||
| Some (Ok ctxt) ->
|
| Some ctxt ->
|
||||||
get ctxt ["version"] >>= fun version ->
|
get ctxt ["version"] >>= fun version ->
|
||||||
Assert.equal_string_option ~msg:__LOC__ (Some "0.0") (c version) ;
|
Assert.equal_string_option ~msg:__LOC__ (Some "0.0") (c version) ;
|
||||||
get ctxt ["a";"b"] >>= fun novembre ->
|
get ctxt ["a";"b"] >>= fun novembre ->
|
||||||
@ -151,9 +156,9 @@ let test_fork idx =
|
|||||||
|
|
||||||
let test_replay idx =
|
let test_replay idx =
|
||||||
checkout idx genesis_block >>= function
|
checkout idx genesis_block >>= function
|
||||||
| None | Some (Error _) ->
|
| None ->
|
||||||
Assert.fail_msg "checkout genesis_block"
|
Assert.fail_msg "checkout genesis_block"
|
||||||
| Some (Ok ctxt0) ->
|
| Some ctxt0 ->
|
||||||
set ctxt0 ["version"] (MBytes.of_string "0.0") >>= fun ctxt1 ->
|
set ctxt0 ["version"] (MBytes.of_string "0.0") >>= fun ctxt1 ->
|
||||||
set ctxt1 ["a"; "b"] (MBytes.of_string "Novembre") >>= fun ctxt2 ->
|
set ctxt1 ["a"; "b"] (MBytes.of_string "Novembre") >>= fun ctxt2 ->
|
||||||
set ctxt2 ["a"; "c"] (MBytes.of_string "Juin") >>= fun ctxt3 ->
|
set ctxt2 ["a"; "c"] (MBytes.of_string "Juin") >>= fun ctxt3 ->
|
||||||
@ -174,9 +179,9 @@ let test_replay idx =
|
|||||||
|
|
||||||
let test_list idx =
|
let test_list idx =
|
||||||
checkout idx genesis_block >>= function
|
checkout idx genesis_block >>= function
|
||||||
| None | Some (Error _) ->
|
| None ->
|
||||||
Assert.fail_msg "checkout genesis_block"
|
Assert.fail_msg "checkout genesis_block"
|
||||||
| Some (Ok ctxt) ->
|
| Some ctxt ->
|
||||||
set ctxt ["a"; "b"] (MBytes.of_string "Novembre") >>= fun ctxt ->
|
set ctxt ["a"; "b"] (MBytes.of_string "Novembre") >>= fun ctxt ->
|
||||||
set ctxt ["a"; "c"] (MBytes.of_string "Juin") >>= fun ctxt ->
|
set ctxt ["a"; "c"] (MBytes.of_string "Juin") >>= fun ctxt ->
|
||||||
set ctxt ["a"; "d"; "e"] (MBytes.of_string "Septembre") >>= fun ctxt ->
|
set ctxt ["a"; "d"; "e"] (MBytes.of_string "Septembre") >>= fun ctxt ->
|
||||||
@ -198,19 +203,6 @@ let test_list idx =
|
|||||||
[["a"; "b"]; ["a"; "c"]; ["a"; "d"]; ["g"; "h"]] l ;
|
[["a"; "b"]; ["a"; "c"]; ["a"; "d"]; ["g"; "h"]] l ;
|
||||||
Lwt.return ()
|
Lwt.return ()
|
||||||
|
|
||||||
let test_invalid idx =
|
|
||||||
checkout idx block3c >>= function
|
|
||||||
| Some (Error [exn]) ->
|
|
||||||
Assert.equal_error_monad
|
|
||||||
~msg:__LOC__(Error_monad.Unclassified "TEST") exn ;
|
|
||||||
Lwt.return_unit
|
|
||||||
| Some (Error _) ->
|
|
||||||
Assert.fail_msg "checkout unexpected error in block3c"
|
|
||||||
| Some (Ok _) ->
|
|
||||||
Assert.fail_msg "checkout valid block3c"
|
|
||||||
| None ->
|
|
||||||
Assert.fail_msg "checkout absent block3c"
|
|
||||||
|
|
||||||
|
|
||||||
(******************************************************************************)
|
(******************************************************************************)
|
||||||
|
|
||||||
@ -220,7 +212,6 @@ let tests : (string * (index -> unit Lwt.t)) list = [
|
|||||||
"fork", test_fork ;
|
"fork", test_fork ;
|
||||||
"replay", test_replay ;
|
"replay", test_replay ;
|
||||||
"list", test_list ;
|
"list", test_list ;
|
||||||
"invalid", test_invalid ;
|
|
||||||
]
|
]
|
||||||
|
|
||||||
let () =
|
let () =
|
||||||
|
@ -27,12 +27,14 @@ let genesis_time =

 module Proto = (val Updater.get_exn genesis_protocol)

-let genesis = {
-  Store.time = genesis_time ;
+let genesis : State.Net.genesis = {
+  time = genesis_time ;
   block = genesis_block ;
   protocol = genesis_protocol ;
 }

+let net_id = State.Net_id.Id genesis_block
+
 let incr_fitness fitness =
   let new_fitness =
     match fitness with
@ -48,20 +50,20 @@ incr_fitness
     [ MBytes.of_string "\000" ; new_fitness ]

 let incr_timestamp timestamp =
-  Time.add timestamp (Random.int64 10L)
+  Time.add timestamp (Int64.add 1L (Random.int64 10L))

 let operation op =
-  let op : Store.operation = {
-    shell = { net_id = Net genesis_block } ;
+  let op : Store.Operation.t = {
+    shell = { net_id } ;
     proto = MBytes.of_string op ;
   } in
   Store.Operation.hash op,
   op,
-  Store.Operation.to_bytes op
+  Data_encoding.Binary.to_bytes Store.Operation.encoding op

-let block state ?(operations = []) pred_hash pred name : Store.block =
-  let fitness = incr_fitness pred.Store.shell.fitness in
-  let timestamp = incr_timestamp pred.Store.shell.timestamp in
+let block state ?(operations = []) pred_hash pred name : Store.Block_header.t =
+  let fitness = incr_fitness pred.Store.Block_header.shell.fitness in
+  let timestamp = incr_timestamp pred.shell.timestamp in
   { shell = {
       net_id = pred.shell.net_id ;
       predecessor = pred_hash ;
@ -74,16 +76,20 @@ let build_chain state tbl otbl pred names =
     (fun (pred_hash, pred) name ->
        begin
          let oph, op, bytes = operation name in
-         State.Operation.store state bytes >>=? fun op' ->
-         Assert.equal_operation ~msg:__LOC__ (Some (oph, op)) op' ;
-         State.Operation.mark_invalid state oph [] >>= fun state_invalid ->
-         Assert.is_true ~msg:__LOC__ state_invalid ;
+         State.Operation.store state op >>= fun created ->
+         Assert.is_true ~msg:__LOC__ created ;
+         State.Operation.read_opt state oph >>= fun op' ->
+         Assert.equal_operation ~msg:__LOC__ (Some op) op' ;
+         State.Operation.mark_invalid state oph [] >>= fun store_invalid ->
+         Assert.is_true ~msg:__LOC__ store_invalid ;
          Hashtbl.add otbl name (oph, Error []) ;
          let block = block ~operations:[oph] state pred_hash pred name in
-         let hash = Store.Block.hash block in
-         State.Block.store state (Store.Block.to_bytes block) >>=? fun block' ->
-         Assert.equal_block ~msg:__LOC__ (Some (hash, block)) block' ;
-         State.Valid_block.store_invalid state hash [] >>= fun store_invalid ->
+         State.Block_header.store state block >>= fun created ->
+         Assert.is_true ~msg:__LOC__ created ;
+         let hash = Store.Block_header.hash block in
+         State.Block_header.read_opt state hash >>= fun block' ->
+         Assert.equal_block ~msg:__LOC__ (Some block) block' ;
+         State.Block_header.mark_invalid state hash [] >>= fun store_invalid ->
          Assert.is_true ~msg:__LOC__ store_invalid ;
          Hashtbl.add tbl name (hash, block) ;
          return (hash, block)
@ -97,7 +103,7 @@ let build_chain state tbl otbl pred names =
   Lwt.return ()

 let block state ?(operations = []) (pred: State.Valid_block.t) name
-  : State.Block. t =
+  : State.Block_header.t =
   let fitness = incr_fitness pred.fitness in
   let timestamp = incr_timestamp pred.timestamp in
   { shell = { net_id = pred.net_id ;
@ -106,24 +112,27 @@ let block state ?(operations = []) (pred: State.Valid_block.t) name
       proto = MBytes.of_string name ;
   }

-let build_valid_chain state net tbl vtbl otbl pred names =
+let build_valid_chain state tbl vtbl otbl pred names =
   Lwt_list.fold_left_s
     (fun pred name ->
        begin
          let oph, op, bytes = operation name in
-         State.Operation.store state bytes >>=? fun op' ->
-         Assert.equal_operation ~msg:__LOC__ (Some (oph, op)) op' ;
-         State.Net.Mempool.add net oph >>= fun add_status ->
-         Assert.is_true ~msg:__LOC__ add_status ;
+         State.Operation.store state op >>= fun created ->
+         Assert.is_true ~msg:__LOC__ created ;
+         State.Operation.read_opt state oph >>= fun op' ->
+         Assert.equal_operation ~msg:__LOC__ (Some op) op' ;
          Hashtbl.add otbl name (oph, Ok op) ;
          let block = block state ~operations:[oph] pred name in
-         let hash = Store.Block.hash block in
-         State.Block.store state (Store.Block.to_bytes block) >>=? fun block' ->
-         Assert.equal_block ~msg:__LOC__ (Some (hash, block)) block' ;
+         State.Block_header.store state block >>= fun created ->
+         Assert.is_true ~msg:__LOC__ created ;
+         let hash = Store.Block_header.hash block in
+         State.Block_header.read_opt state hash >>= fun block' ->
+         Assert.equal_block ~msg:__LOC__ (Some block) block' ;
          Hashtbl.add tbl name (hash, block) ;
          Lwt.return (Proto.parse_block block) >>=? fun block ->
          Proto.apply pred.context block [] >>=? fun ctxt ->
-         State.Valid_block.store state hash ctxt >>=? fun vblock ->
+         State.Valid_block.store state hash ctxt >>=? fun _vblock ->
+         State.Valid_block.read state hash >>=? fun vblock ->
          Hashtbl.add vtbl name vblock ;
          return vblock
        end >>= function
@ -135,40 +144,36 @@ let build_valid_chain state net tbl vtbl otbl pred names =
     names >>= fun _ ->
   Lwt.return ()

-let build_example_tree state net =
+let build_example_tree net =
   let tbl = Hashtbl.create 23 in
   let vtbl = Hashtbl.create 23 in
   let otbl = Hashtbl.create 23 in
-  State.Net.Blockchain.genesis net >>= fun genesis ->
+  State.Valid_block.Current.genesis net >>= fun genesis ->
+  Hashtbl.add vtbl "Genesis" genesis ;
+  Hashtbl.add tbl "Genesis" (genesis.hash, { State.Block_header.shell = genesis.shell_header ; proto = MBytes.create 0 } ) ;
   let chain = [ "A1" ; "A2" ; "A3" ; "A4" ; "A5" ; "A6" ; "A7" ; "A8" ] in
-  build_valid_chain state net tbl vtbl otbl genesis chain >>= fun () ->
+  build_valid_chain net tbl vtbl otbl genesis chain >>= fun () ->
   let a3 = Hashtbl.find vtbl "A3" in
   let chain = [ "B1" ; "B2" ; "B3" ; "B4" ; "B5" ; "B6" ; "B7" ; "B8" ] in
-  build_valid_chain state net tbl vtbl otbl a3 chain >>= fun () ->
+  build_valid_chain net tbl vtbl otbl a3 chain >>= fun () ->
   let b7 = Hashtbl.find tbl "B7" in
   let chain = [ "C1" ; "C2" ; "C3" ; "C4" ; "C5" ; "C6" ; "C7" ; "C8" ] in
-  build_chain state tbl otbl b7 chain >>= fun () ->
+  build_chain net tbl otbl b7 chain >>= fun () ->
   let pending_op = "PP" in
   let oph, op, bytes = operation pending_op in
-  State.Operation.store state bytes >>= fun op' ->
-  Assert.equal_result
-    ~msg:__LOC__
-    (Ok (Some (oph, op)))
-    op'
-    ~equal_ok:Assert.equal_operation
-    ~equal_err:(fun ?msg _ _ -> Assert.fail_msg "Operations differs") ;
+  State.Operation.store net op >>= fun _ ->
+  State.Operation.read_opt net oph >>= fun op' ->
+  Assert.equal_operation ~msg:__LOC__ (Some op) op' ;
   Hashtbl.add otbl pending_op (oph, Ok op) ;
-  State.Net.Mempool.add net oph >>= fun add_status ->
-  Assert.is_true ~msg:__LOC__ add_status ;
   Lwt.return (tbl, vtbl, otbl)

 type state = {
-  block: (string, Block_hash.t * Store.block) Hashtbl.t ;
-  operation: (string, Operation_hash.t * Store.operation tzresult) Hashtbl.t ;
+  block: (string, Block_hash.t * Store.Block_header.t) Hashtbl.t ;
+  operation: (string, Operation_hash.t * Store.Operation.t tzresult) Hashtbl.t ;
   vblock: (string, State.Valid_block.t) Hashtbl.t ;
   state: State.t ;
   net: State.Net.t ;
-  init: unit -> State.t Lwt.t;
+  init: unit -> State.t tzresult Lwt.t;
 }

 let block s = Hashtbl.find s.block
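Note: both build_chain and build_valid_chain above now follow the same store-then-read round trip against the refactored State API. A condensed, illustrative sketch of that pattern — the wrapper function is hypothetical, only the individual calls are the ones shown in the hunk:

  let store_and_check_operation state name =
    let oph, op, _bytes = operation name in
    State.Operation.store state op >>= fun created ->
    (* store reports whether the value was newly added *)
    Assert.is_true ~msg:__LOC__ created ;
    State.Operation.read_opt state oph >>= fun op' ->
    Assert.equal_operation ~msg:__LOC__ (Some op) op' ;
    Lwt.return oph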
@ -185,19 +190,16 @@ let rev_find s h =
   with Found s -> s

 let blocks s =
-  Pervasives.(
-    Hashtbl.fold (fun k v acc -> (k,v) :: acc) s.block []
-    |> List.sort Pervasives.compare)
+  Hashtbl.fold (fun k v acc -> (k,v) :: acc) s.block []
+  |> List.sort Pervasives.compare

 let vblocks s =
-  Pervasives.(
-    Hashtbl.fold (fun k v acc -> (k,v) :: acc) s.vblock []
-    |> List.sort Pervasives.compare)
+  Hashtbl.fold (fun k v acc -> (k,v) :: acc) s.vblock []
+  |> List.sort Pervasives.compare

 let operations s =
-  Pervasives.(
-    Hashtbl.fold (fun k v acc -> (k,v) :: acc) s.operation []
-    |> List.sort Pervasives.compare)
+  Hashtbl.fold (fun k v acc -> (k,v) :: acc) s.operation []
+  |> List.sort Pervasives.compare

 let wrap_state_init f base_dir =
   begin
@ -205,46 +207,50 @@ let wrap_state_init f base_dir =
     let context_root = base_dir // "context" in
     let init () =
       State.read
-        ~ttl:(3600 * 24)
-        ~request_operations: (fun _ -> assert false)
-        ~request_blocks: (fun _ -> assert false)
-        ~request_protocols: (fun _ -> assert false)
         ~store_root
         ~context_root
         () in
-    init () >>= fun state ->
-    State.Net.create state genesis >>=? fun net ->
-    State.Net.activate net ;
-    build_example_tree state net >>= fun (block, vblock, operation) ->
+    init () >>=? fun state ->
+    State.Net.create state genesis >>= fun net ->
+    build_example_tree net >>= fun (block, vblock, operation) ->
     f { state ; net ; block ; vblock ; operation ; init } >>=? fun s ->
-    State.shutdown s.state >>= fun () ->
     return ()
   end >>= function
   | Ok () -> Lwt.return_unit
   | Error err ->
       Lwt.return (Error_monad.pp_print_error Format.err_formatter err)

-let save_reload s =
-  State.shutdown s.state >>= fun () ->
-  s.init () >>= fun state ->
-  State.Net.create state genesis >>=? fun net ->
-  let s = { s with state ; net } in
-  return s
-
 let test_init (s: state) =
-  return s
+  return ()

 let test_read_operation (s: state) =
   Lwt_list.iter_s (fun (name, (oph, op)) ->
-      State.Operation.read s.state oph >>= function
+      State.Operation.invalid s.net oph >>= function
+      | Some err ->
+          begin match op with
+          | Ok _ ->
+              Assert.fail_msg "Incorrect invalid operation read %s" name
+          | Error e ->
+              if e <> err then
+                Assert.fail_msg "Incorrect operation read %s" name ;
+              Lwt.return_unit
+          end
       | None ->
-          Assert.fail_msg "Cannot read block %s" name
-      | Some { Time.data } ->
-          if op <> data then
-            Assert.fail_msg "Incorrect operation read %s" name ;
-          Lwt.return_unit)
+          State.Operation.read_opt s.net oph >>= function
+          | None ->
+              Assert.fail_msg "Cannot read block %s" name
+          | Some data ->
+              begin match op with
+              | Error _ ->
+                  Assert.fail_msg "Incorrect valid operation read %s" name
+              | Ok op ->
+                  if op.Store.Operation.proto <> data.proto then
+                    Assert.fail_msg "Incorrect operation read %s %s" name
+                      (MBytes.to_string data.Store.Operation.proto) ;
+                  Lwt.return_unit
+              end)
     (operations s) >>= fun () ->
-  return s
+  return ()

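Note: with save_reload gone, state initialisation in these tests reduces to the simplified State.read shown above, which now returns a tzresult. A minimal sketch assuming only the calls visible in this hunk (the helper name and directory layout mirror wrap_state_init and are otherwise arbitrary):

  let init_state base_dir =
    State.read
      ~store_root:(base_dir // "store")
      ~context_root:(base_dir // "context")
      () >>=? fun state ->
    State.Net.create state genesis >>= fun net ->
    return (state, net)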
@ -255,32 +261,30 @@ let test_read_operation (s: state) =
 let test_read_block (s: state) =
   Lwt_list.iter_s (fun (name, (hash, block)) ->
       begin
-        State.Block.read s.state hash >>= function
+        State.Block_header.read_opt s.net hash >>= function
        | None ->
            Assert.fail_msg "Cannot read block %s" name
-        | Some { Time.data = block' ; time } ->
-            if not (Store.Block.equal block block') then
+        | Some block' ->
+            if not (Store.Block_header.equal block block') then
              Assert.fail_msg "Error while reading block %s" name ;
            Lwt.return_unit
      end >>= fun () ->
      let vblock =
        try Some (vblock s name)
        with Not_found -> None in
-      State.Valid_block.read s.state hash >>= function
-      | None ->
-          Assert.fail_msg "Cannot read %s" name
-      | Some (Error _) ->
+      State.Valid_block.read s.net hash >>= function
+      | Error _ ->
          if vblock <> None then
            Assert.fail_msg "Error while reading valid block %s" name ;
          Lwt.return_unit
-      | Some (Ok _vblock') ->
+      | Ok _vblock' ->
          match vblock with
          | None ->
              Assert.fail_msg "Error while reading invalid block %s" name
          | Some _vblock ->
              Lwt.return_unit
    ) (blocks s) >>= fun () ->
-  return s
+  return ()


 (****************************************************************************)
@ -288,14 +292,14 @@ let test_read_block (s: state) =
 (** State.successors *)

 let compare s kind name succs l =
-  if Block_hash_set.cardinal succs <> List.length l then
+  if Block_hash.Set.cardinal succs <> List.length l then
    Assert.fail_msg
      "unexpected %ssuccessors size (%s: %d %d)"
-      kind name (Block_hash_set.cardinal succs) (List.length l) ;
+      kind name (Block_hash.Set.cardinal succs) (List.length l) ;
  List.iter
    (fun bname ->
       let bh = fst @@ block s bname in
-       if not (Block_hash_set.mem bh succs) then
+       if not (Block_hash.Set.mem bh succs) then
         Assert.fail_msg
           "missing block in %ssuccessors (%s: %s)" kind name bname)
    l
@ -303,10 +307,10 @@ let compare s kind name succs l =
 let test_successors s =
   let test s name expected invalid_expected =
     let b = vblock s name in
-    State.Valid_block.read s.state b.hash >>= function
-    | None | Some (Error _) ->
+    State.Valid_block.read s.net b.hash >>= function
+    | Error _ ->
        Assert.fail_msg "Failed while reading block %s" name
-    | Some (Ok { successors ; invalid_successors}) ->
+    | Ok { successors ; invalid_successors } ->
        compare s "" name successors expected ;
        compare s "invalid " name invalid_successors invalid_expected ;
        Lwt.return_unit
@ -317,7 +321,7 @@ let test_successors s =
   test s "A8" [] [] >>= fun () ->
   test s "B1" ["B2"] [] >>= fun () ->
   test s "B7" ["B8"] ["C1"] >>= fun () ->
-  return s
+  return ()


 (****************************************************************************)
@ -331,24 +335,27 @@ let rec compare_path p1 p2 = match p1, p2 with
|
|||||||
|
|
||||||
let test_path (s: state) =
|
let test_path (s: state) =
|
||||||
let check_path h1 h2 p2 =
|
let check_path h1 h2 p2 =
|
||||||
State.Block.path s.state (fst @@ block s h1) (fst @@ block s h2) >>= function
|
State.Block_header.Helpers.path s.net (fst @@ block s h1) (fst @@ block s h2) >>= function
|
||||||
| Error _ ->
|
| Error _ ->
|
||||||
Assert.fail_msg "cannot compute path %s -> %s" h1 h2
|
Assert.fail_msg "cannot compute path %s -> %s" h1 h2
|
||||||
| Ok p1 ->
|
| Ok p1 ->
|
||||||
|
let p1 = List.map (fun b -> fst b) p1 in
|
||||||
let p2 = List.map (fun b -> fst (block s b)) p2 in
|
let p2 = List.map (fun b -> fst (block s b)) p2 in
|
||||||
if not (compare_path p1 p2) then
|
if not (compare_path p1 p2) then
|
||||||
Assert.fail_msg "bad path %s -> %s" h1 h2 ;
|
Assert.fail_msg "bad path %s -> %s" h1 h2 ;
|
||||||
Lwt.return_unit in
|
Lwt.return_unit in
|
||||||
|
check_path "Genesis" "Genesis" [] >>= fun () ->
|
||||||
|
check_path "A1" "A1" [] >>= fun () ->
|
||||||
check_path "A2" "A6" ["A3"; "A4"; "A5"; "A6"] >>= fun () ->
|
check_path "A2" "A6" ["A3"; "A4"; "A5"; "A6"] >>= fun () ->
|
||||||
check_path "B2" "B6" ["B3"; "B4"; "B5"; "B6"] >>= fun () ->
|
check_path "B2" "B6" ["B3"; "B4"; "B5"; "B6"] >>= fun () ->
|
||||||
check_path "A1" "B3" ["A2"; "A3"; "B1"; "B2"; "B3"] >>= fun () ->
|
check_path "A1" "B3" ["A2"; "A3"; "B1"; "B2"; "B3"] >>= fun () ->
|
||||||
check_path "A1" "C2" ["A2"; "A3"; "B1"; "B2"; "B3" ; "B4" ;
|
check_path "A1" "C2" ["A2"; "A3"; "B1"; "B2"; "B3" ; "B4" ;
|
||||||
"B5" ; "B6" ; "B7" ; "C1" ; "C2" ] >>= fun () ->
|
"B5" ; "B6" ; "B7" ; "C1" ; "C2" ] >>= fun () ->
|
||||||
return s
|
return ()
|
||||||
|
|
||||||
let test_valid_path (s: state) =
|
let test_valid_path (s: state) =
|
||||||
let check_path h1 h2 p2 =
|
let check_path h1 h2 p2 =
|
||||||
State.Valid_block.path s.state (vblock s h1) (vblock s h2) >>= function
|
State.Valid_block.Helpers.path s.net (vblock s h1) (vblock s h2) >>= function
|
||||||
| None ->
|
| None ->
|
||||||
Assert.fail_msg "cannot compute path %s -> %s" h1 h2 ;
|
Assert.fail_msg "cannot compute path %s -> %s" h1 h2 ;
|
||||||
| Some (p: State.Valid_block.t list) ->
|
| Some (p: State.Valid_block.t list) ->
|
||||||
@ -357,10 +364,12 @@ let test_valid_path (s: state) =
|
|||||||
if not (compare_path p p2) then
|
if not (compare_path p p2) then
|
||||||
Assert.fail_msg "bad path %s -> %s" h1 h2 ;
|
Assert.fail_msg "bad path %s -> %s" h1 h2 ;
|
||||||
Lwt.return_unit in
|
Lwt.return_unit in
|
||||||
|
check_path "Genesis" "Genesis" [] >>= fun () ->
|
||||||
|
check_path "A1" "A1" [] >>= fun () ->
|
||||||
check_path "A2" "A6" ["A3"; "A4"; "A5"; "A6"] >>= fun () ->
|
check_path "A2" "A6" ["A3"; "A4"; "A5"; "A6"] >>= fun () ->
|
||||||
check_path "B2" "B6" ["B3"; "B4"; "B5"; "B6"] >>= fun () ->
|
check_path "B2" "B6" ["B3"; "B4"; "B5"; "B6"] >>= fun () ->
|
||||||
check_path "A1" "B3" ["A2"; "A3"; "B1"; "B2"; "B3"] >>= fun () ->
|
check_path "A1" "B3" ["A2"; "A3"; "B1"; "B2"; "B3"] >>= fun () ->
|
||||||
return s
|
return ()
|
||||||
|
|
||||||
|
|
||||||
(****************************************************************************)
|
(****************************************************************************)
|
||||||
@ -369,22 +378,28 @@ let test_valid_path (s: state) =
|
|||||||
|
|
||||||
let test_ancestor s =
|
let test_ancestor s =
|
||||||
let check_ancestor h1 h2 expected =
|
let check_ancestor h1 h2 expected =
|
||||||
State.Block.common_ancestor
|
State.Block_header.Helpers.common_ancestor
|
||||||
s.state (fst @@ block s h1) (fst @@ block s h2) >>= function
|
s.net (fst @@ block s h1) (fst @@ block s h2) >>= function
|
||||||
| Error _ ->
|
| Error _ ->
|
||||||
Assert.fail_msg "Cannot compure ancestor for %s %s" h1 h2 ;
|
Assert.fail_msg "Cannot compure ancestor for %s %s" h1 h2 ;
|
||||||
| Ok a ->
|
| Ok (a, _) ->
|
||||||
if not (Block_hash.equal a (fst expected)) then
|
if not (Block_hash.equal a (fst expected)) then
|
||||||
Assert.fail_msg
|
Assert.fail_msg
|
||||||
"bad ancestor %s %s: found %s, expected %s"
|
"bad ancestor %s %s: found %s, expected %s"
|
||||||
h1 h2 (rev_find s a) (rev_find s @@ fst expected) ;
|
h1 h2 (rev_find s a) (rev_find s @@ fst expected) ;
|
||||||
Lwt.return_unit in
|
Lwt.return_unit in
|
||||||
let check_valid_ancestor h1 h2 expected =
|
let check_valid_ancestor h1 h2 expected =
|
||||||
State.Valid_block.common_ancestor
|
State.Valid_block.Helpers.common_ancestor
|
||||||
s.state (vblock s h1) (vblock s h2) >>= fun a ->
|
s.net (vblock s h1) (vblock s h2) >>= fun a ->
|
||||||
if not (Block_hash.equal a.hash expected.State.Valid_block.hash) then
|
if not (Block_hash.equal a.hash expected.State.Valid_block.hash) then
|
||||||
Assert.fail_msg "bad ancestor %s %s" h1 h2 ;
|
Assert.fail_msg "bad ancestor %s %s" h1 h2 ;
|
||||||
Lwt.return_unit in
|
Lwt.return_unit in
|
||||||
|
check_ancestor "Genesis" "Genesis" (block s "Genesis") >>= fun () ->
|
||||||
|
check_ancestor "Genesis" "A3" (block s "Genesis") >>= fun () ->
|
||||||
|
check_ancestor "A3" "Genesis" (block s "Genesis") >>= fun () ->
|
||||||
|
check_ancestor "A1" "A1" (block s "A1") >>= fun () ->
|
||||||
|
check_ancestor "A1" "A3" (block s "A1") >>= fun () ->
|
||||||
|
check_ancestor "A3" "A1" (block s "A1") >>= fun () ->
|
||||||
check_ancestor "A6" "B6" (block s "A3") >>= fun () ->
|
check_ancestor "A6" "B6" (block s "A3") >>= fun () ->
|
||||||
check_ancestor "B6" "A6" (block s "A3") >>= fun () ->
|
check_ancestor "B6" "A6" (block s "A3") >>= fun () ->
|
||||||
check_ancestor "A4" "B1" (block s "A3") >>= fun () ->
|
check_ancestor "A4" "B1" (block s "A3") >>= fun () ->
|
||||||
@ -405,7 +420,7 @@ let test_ancestor s =
|
|||||||
check_valid_ancestor "B1" "A3" (vblock s "A3") >>= fun () ->
|
check_valid_ancestor "B1" "A3" (vblock s "A3") >>= fun () ->
|
||||||
check_valid_ancestor "A2" "B1" (vblock s "A2") >>= fun () ->
|
check_valid_ancestor "A2" "B1" (vblock s "A2") >>= fun () ->
|
||||||
check_valid_ancestor "B1" "A2" (vblock s "A2") >>= fun () ->
|
check_valid_ancestor "B1" "A2" (vblock s "A2") >>= fun () ->
|
||||||
return s
|
return ()
|
||||||
|
|
||||||
|
|
||||||
(****************************************************************************)
|
(****************************************************************************)
|
||||||
@ -414,8 +429,8 @@ let test_ancestor s =
|
|||||||
|
|
||||||
let test_locator s =
|
let test_locator s =
|
||||||
let check_locator h1 expected =
|
let check_locator h1 expected =
|
||||||
State.Block.block_locator
|
State.Block_header.Helpers.block_locator
|
||||||
s.state (List.length expected) (fst @@ block s h1) >>= function
|
s.net (List.length expected) (fst @@ block s h1) >>= function
|
||||||
| Error _ ->
|
| Error _ ->
|
||||||
Assert.fail_msg "Cannot compute locator for %s" h1
|
Assert.fail_msg "Cannot compute locator for %s" h1
|
||||||
| Ok l ->
|
| Ok l ->
|
||||||
@ -430,8 +445,8 @@ let test_locator s =
|
|||||||
l expected;
|
l expected;
|
||||||
Lwt.return_unit in
|
Lwt.return_unit in
|
||||||
let check_valid_locator h1 expected =
|
let check_valid_locator h1 expected =
|
||||||
State.Valid_block.block_locator
|
State.Valid_block.Helpers.block_locator
|
||||||
s.state (List.length expected) (vblock s h1) >>= fun l ->
|
s.net (List.length expected) (vblock s h1) >>= fun l ->
|
||||||
if List.length l <> List.length expected then
|
if List.length l <> List.length expected then
|
||||||
Assert.fail_msg
|
Assert.fail_msg
|
||||||
"Invalid locator length %s (found: %d, expected: %d)"
|
"Invalid locator length %s (found: %d, expected: %d)"
|
||||||
@ -454,7 +469,7 @@ let test_locator s =
|
|||||||
check_valid_locator "B8"
|
check_valid_locator "B8"
|
||||||
["B8";"B7";"B6";"B5";"B4";"B3";"B2";"B1";"A3"] >>= fun () ->
|
["B8";"B7";"B6";"B5";"B4";"B3";"B2";"B1";"A3"] >>= fun () ->
|
||||||
check_valid_locator "B8" ["B8";"B7";"B6";"B5";"B4"] >>= fun () ->
|
check_valid_locator "B8" ["B8";"B7";"B6";"B5";"B4"] >>= fun () ->
|
||||||
return s
|
return ()
|
||||||
|
|
||||||
|
|
||||||
(****************************************************************************)
|
(****************************************************************************)
|
||||||
@ -462,25 +477,21 @@ let test_locator s =
|
|||||||
(** State.known_heads *)
|
(** State.known_heads *)
|
||||||
|
|
||||||
let compare s name heads l =
|
let compare s name heads l =
|
||||||
if Block_hash_map.cardinal heads <> List.length l then
|
if List.length heads <> List.length l then
|
||||||
Assert.fail_msg
|
Assert.fail_msg
|
||||||
"unexpected known_heads size (%s: %d %d)"
|
"unexpected known_heads size (%s: %d %d)"
|
||||||
name (Block_hash_map.cardinal heads) (List.length l) ;
|
name (List.length heads) (List.length l) ;
|
||||||
List.iter
|
List.iter
|
||||||
(fun bname ->
|
(fun bname ->
|
||||||
let hash = (vblock s bname).hash in
|
let hash = (vblock s bname).hash in
|
||||||
if not (Block_hash_map.mem hash heads) then
|
if not (List.exists (fun b -> Block_hash.equal hash b.State.Valid_block.hash) heads) then
|
||||||
Assert.fail_msg "missing block in known_heads (%s: %s)" name bname)
|
Assert.fail_msg "missing block in known_heads (%s: %s)" name bname)
|
||||||
l
|
l
|
||||||
|
|
||||||
let test_known_heads s =
|
let test_known_heads s =
|
||||||
State.Valid_block.known_heads s.state >>= fun heads ->
|
State.Valid_block.known_heads s.net >>= fun heads ->
|
||||||
compare s "initial" heads ["A8";"B8"] ;
|
compare s "initial" heads ["A8";"B8"] ;
|
||||||
State.shutdown s.state >>= fun () ->
|
return ()
|
||||||
s.init () >>= fun state ->
|
|
||||||
let s = { s with state } in
|
|
||||||
compare s "initial" heads ["A8";"B8"] ;
|
|
||||||
return s
|
|
||||||
|
|
||||||
|
|
||||||
(****************************************************************************)
|
(****************************************************************************)
|
||||||
@ -488,18 +499,14 @@ let test_known_heads s =
|
|||||||
(** State.head/set_head *)
|
(** State.head/set_head *)
|
||||||
|
|
||||||
let test_head s =
|
let test_head s =
|
||||||
State.Net.Blockchain.head s.net >>= fun head ->
|
State.Valid_block.Current.head s.net >>= fun head ->
|
||||||
if not (Block_hash.equal head.hash genesis_block) then
|
if not (Block_hash.equal head.hash genesis_block) then
|
||||||
Assert.fail_msg "unexpected head" ;
|
Assert.fail_msg "unexpected head" ;
|
||||||
State.Net.Blockchain.set_head s.net (vblock s "A6") >>= fun _ ->
|
State.Valid_block.Current.set_head s.net (vblock s "A6") >>= fun _ ->
|
||||||
State.Net.Blockchain.head s.net >>= fun head ->
|
State.Valid_block.Current.head s.net >>= fun head ->
|
||||||
if not (Block_hash.equal head.hash (vblock s "A6").hash) then
|
if not (Block_hash.equal head.hash (vblock s "A6").hash) then
|
||||||
Assert.fail_msg "unexpected head" ;
|
Assert.fail_msg "unexpected head" ;
|
||||||
save_reload s >>=? fun s ->
|
return ()
|
||||||
State.Net.Blockchain.head s.net >>= fun head ->
|
|
||||||
if not (Block_hash.equal head.hash (vblock s "A6").hash) then
|
|
||||||
Assert.fail_msg "unexpected head" ;
|
|
||||||
return s
|
|
||||||
|
|
||||||
|
|
||||||
(****************************************************************************)
|
(****************************************************************************)
|
||||||
@ -508,7 +515,7 @@ let test_head s =
|
|||||||
|
|
||||||
let test_mem s =
|
let test_mem s =
|
||||||
let mem s x =
|
let mem s x =
|
||||||
State.Net.Blockchain.mem s.net (fst @@ block s x) in
|
State.Valid_block.Current.mem s.net (fst @@ block s x) in
|
||||||
let test_mem s x =
|
let test_mem s x =
|
||||||
mem s x >>= function
|
mem s x >>= function
|
||||||
| true -> Lwt.return_unit
|
| true -> Lwt.return_unit
|
||||||
@ -523,21 +530,21 @@ let test_mem s =
|
|||||||
test_not_mem s "B1" >>= fun () ->
|
test_not_mem s "B1" >>= fun () ->
|
||||||
test_not_mem s "B6" >>= fun () ->
|
test_not_mem s "B6" >>= fun () ->
|
||||||
test_not_mem s "B8" >>= fun () ->
|
test_not_mem s "B8" >>= fun () ->
|
||||||
State.Net.Blockchain.set_head s.net (vblock s "A8") >>= fun _ ->
|
State.Valid_block.Current.set_head s.net (vblock s "A8") >>= fun _ ->
|
||||||
test_mem s "A3" >>= fun () ->
|
test_mem s "A3" >>= fun () ->
|
||||||
test_mem s "A6" >>= fun () ->
|
test_mem s "A6" >>= fun () ->
|
||||||
test_mem s "A8" >>= fun () ->
|
test_mem s "A8" >>= fun () ->
|
||||||
test_not_mem s "B1" >>= fun () ->
|
test_not_mem s "B1" >>= fun () ->
|
||||||
test_not_mem s "B6" >>= fun () ->
|
test_not_mem s "B6" >>= fun () ->
|
||||||
test_not_mem s "B8" >>= fun () ->
|
test_not_mem s "B8" >>= fun () ->
|
||||||
State.Net.Blockchain.set_head s.net (vblock s "A6") >>= fun _ ->
|
State.Valid_block.Current.set_head s.net (vblock s "A6") >>= fun _ ->
|
||||||
test_mem s "A3" >>= fun () ->
|
test_mem s "A3" >>= fun () ->
|
||||||
test_mem s "A6" >>= fun () ->
|
test_mem s "A6" >>= fun () ->
|
||||||
test_not_mem s "A8" >>= fun () ->
|
test_not_mem s "A8" >>= fun () ->
|
||||||
test_not_mem s "B1" >>= fun () ->
|
test_not_mem s "B1" >>= fun () ->
|
||||||
test_not_mem s "B6" >>= fun () ->
|
test_not_mem s "B6" >>= fun () ->
|
||||||
test_not_mem s "B8" >>= fun () ->
|
test_not_mem s "B8" >>= fun () ->
|
||||||
State.Net.Blockchain.set_head s.net (vblock s "B6") >>= fun _ ->
|
State.Valid_block.Current.set_head s.net (vblock s "B6") >>= fun _ ->
|
||||||
test_mem s "A3" >>= fun () ->
|
test_mem s "A3" >>= fun () ->
|
||||||
test_not_mem s "A4" >>= fun () ->
|
test_not_mem s "A4" >>= fun () ->
|
||||||
test_not_mem s "A6" >>= fun () ->
|
test_not_mem s "A6" >>= fun () ->
|
||||||
@ -545,7 +552,7 @@ let test_mem s =
|
|||||||
test_mem s "B1" >>= fun () ->
|
test_mem s "B1" >>= fun () ->
|
||||||
test_mem s "B6" >>= fun () ->
|
test_mem s "B6" >>= fun () ->
|
||||||
test_not_mem s "B8" >>= fun () ->
|
test_not_mem s "B8" >>= fun () ->
|
||||||
State.Net.Blockchain.set_head s.net (vblock s "B8") >>= fun _ ->
|
State.Valid_block.Current.set_head s.net (vblock s "B8") >>= fun _ ->
|
||||||
test_mem s "A3" >>= fun () ->
|
test_mem s "A3" >>= fun () ->
|
||||||
test_not_mem s "A4" >>= fun () ->
|
test_not_mem s "A4" >>= fun () ->
|
||||||
test_not_mem s "A6" >>= fun () ->
|
test_not_mem s "A6" >>= fun () ->
|
||||||
@ -553,11 +560,7 @@ let test_mem s =
|
|||||||
test_mem s "B1" >>= fun () ->
|
test_mem s "B1" >>= fun () ->
|
||||||
test_mem s "B6" >>= fun () ->
|
test_mem s "B6" >>= fun () ->
|
||||||
test_mem s "B8" >>= fun () ->
|
test_mem s "B8" >>= fun () ->
|
||||||
save_reload s >>=? fun s ->
|
return ()
|
||||||
State.Net.Blockchain.head s.net >>= fun head ->
|
|
||||||
if not (Block_hash.equal head.hash (vblock s "B8").hash) then
|
|
||||||
Assert.fail_msg "Invalid head after save/load" ;
|
|
||||||
return s
|
|
||||||
|
|
||||||
|
|
||||||
(****************************************************************************)
|
(****************************************************************************)
|
||||||
@ -566,8 +569,8 @@ let test_mem s =
|
|||||||
|
|
||||||
let test_new s =
|
let test_new s =
|
||||||
let test s h expected =
|
let test s h expected =
|
||||||
State.Valid_block.block_locator s.state 50 (vblock s h) >>= fun loc ->
|
State.Valid_block.Helpers.block_locator s.net 50 (vblock s h) >>= fun loc ->
|
||||||
State.Net.Blockchain.find_new s.net loc (List.length expected) >>= function
|
State.Valid_block.Current.find_new s.net loc (List.length expected) >>= function
|
||||||
| Error _ ->
|
| Error _ ->
|
||||||
Assert.fail_msg "Failed to compute new blocks %s" h
|
Assert.fail_msg "Failed to compute new blocks %s" h
|
||||||
| Ok blocks ->
|
| Ok blocks ->
|
||||||
@ -583,12 +586,12 @@ let test_new s =
|
|||||||
Lwt.return_unit
|
Lwt.return_unit
|
||||||
in
|
in
|
||||||
test s "A6" [] >>= fun () ->
|
test s "A6" [] >>= fun () ->
|
||||||
State.Net.Blockchain.set_head s.net (vblock s "A8") >>= fun _ ->
|
State.Valid_block.Current.set_head s.net (vblock s "A8") >>= fun _ ->
|
||||||
test s "A6" ["A7";"A8"] >>= fun () ->
|
test s "A6" ["A7";"A8"] >>= fun () ->
|
||||||
test s "A6" ["A7"] >>= fun () ->
|
test s "A6" ["A7"] >>= fun () ->
|
||||||
test s "B4" ["A4"] >>= fun () ->
|
test s "B4" ["A4"] >>= fun () ->
|
||||||
test s "B7" ["A4";"A5";"A6";"A7"] >>= fun () ->
|
test s "B7" ["A4";"A5";"A6";"A7"] >>= fun () ->
|
||||||
return s
|
return ()
|
||||||
|
|
||||||
|
|
||||||
(****************************************************************************)
|
(****************************************************************************)
|
||||||
@ -596,7 +599,7 @@ let test_new s =
|
|||||||
(** State.mempool *)
|
(** State.mempool *)
|
||||||
|
|
||||||
let compare s name mempool l =
|
let compare s name mempool l =
|
||||||
let mempool_sz = Operation_hash_set.cardinal mempool in
|
let mempool_sz = Operation_hash.Set.cardinal mempool in
|
||||||
let l_sz = List.length l in
|
let l_sz = List.length l in
|
||||||
if mempool_sz <> l_sz then
|
if mempool_sz <> l_sz then
|
||||||
Assert.fail
|
Assert.fail
|
||||||
@ -607,57 +610,48 @@ let compare s name mempool l =
|
|||||||
(fun oname ->
|
(fun oname ->
|
||||||
try
|
try
|
||||||
let oph = fst @@ operation s oname in
|
let oph = fst @@ operation s oname in
|
||||||
if not (Operation_hash_set.mem oph mempool) then
|
if not (Operation_hash.Set.mem oph mempool) then
|
||||||
Assert.fail_msg "missing operation in mempool (%s: %s)" name oname
|
Assert.fail_msg "missing operation in mempool (%s: %s)" name oname
|
||||||
with Not_found ->
|
with Not_found ->
|
||||||
Assert.fail_msg "Read value not found in mempool (%s: %s)" name oname)
|
Assert.fail_msg "Read value not found in mempool (%s: %s)" name oname)
|
||||||
l
|
l
|
||||||
|
|
||||||
let test_mempool s =
|
let test_mempool s =
|
||||||
State.Net.Mempool.get s.net >>= fun mempool ->
|
State.Operation.list_pending s.net >>= fun mempool ->
|
||||||
compare s "initial" mempool
|
compare s "initial" mempool
|
||||||
["PP";
|
["PP";
|
||||||
"A1" ; "A2" ; "A3" ; "A4" ; "A5" ; "A6" ; "A7" ; "A8" ;
|
"A1" ; "A2" ; "A3" ; "A4" ; "A5" ; "A6" ; "A7" ; "A8" ;
|
||||||
"B1" ; "B2" ; "B3" ; "B4" ; "B5" ; "B6" ; "B7" ; "B8" ] ;
|
"B1" ; "B2" ; "B3" ; "B4" ; "B5" ; "B6" ; "B7" ; "B8" ] ;
|
||||||
State.Net.Blockchain.set_head s.net (vblock s "A8") >>= fun _ ->
|
State.Valid_block.Current.set_head s.net (vblock s "A8") >>= fun _ ->
|
||||||
State.Net.Mempool.get s.net >>= fun mempool ->
|
State.Operation.list_pending s.net >>= fun mempool ->
|
||||||
compare s "A8" mempool
|
compare s "A8" mempool
|
||||||
["PP"; "B1" ; "B2" ; "B3" ; "B4" ; "B5" ; "B6" ; "B7" ; "B8" ] ;
|
["PP"; "B1" ; "B2" ; "B3" ; "B4" ; "B5" ; "B6" ; "B7" ; "B8" ] ;
|
||||||
State.Net.Blockchain.set_head s.net (vblock s "A6") >>= fun _ ->
|
State.Valid_block.Current.set_head s.net (vblock s "A6") >>= fun _ ->
|
||||||
State.Net.Mempool.get s.net >>= fun mempool ->
|
State.Operation.list_pending s.net >>= fun mempool ->
|
||||||
compare s "A6" mempool
|
compare s "A6" mempool
|
||||||
["PP";
|
["PP";
|
||||||
"A7" ; "A8" ;
|
"A7" ; "A8" ;
|
||||||
"B1" ; "B2" ; "B3" ; "B4" ; "B5" ; "B6" ; "B7" ; "B8" ] ;
|
"B1" ; "B2" ; "B3" ; "B4" ; "B5" ; "B6" ; "B7" ; "B8" ] ;
|
||||||
State.Net.Blockchain.set_head s.net (vblock s "B6") >>= fun _ ->
|
State.Valid_block.Current.set_head s.net (vblock s "B6") >>= fun _ ->
|
||||||
State.Net.Mempool.get s.net >>= fun mempool ->
|
State.Operation.list_pending s.net >>= fun mempool ->
|
||||||
compare s "B6" mempool
|
compare s "B6" mempool
|
||||||
["PP";
|
["PP";
|
||||||
"A4" ; "A5" ; "A6" ; "A7" ; "A8" ;
|
"A4" ; "A5" ; "A6" ; "A7" ; "A8" ;
|
||||||
"B7" ; "B8" ] ;
|
"B7" ; "B8" ] ;
|
||||||
State.Net.Mempool.remove s.net (fst @@ operation s "PP") >>= fun rm_status ->
|
State.Operation.mark_invalid s.net (fst @@ operation s "PP") [] >>= fun rm_status ->
|
||||||
Assert.is_true ~msg:__LOC__ rm_status ;
|
Assert.is_true ~msg:__LOC__ rm_status ;
|
||||||
State.Net.Mempool.remove s.net (fst @@ operation s "PP") >>= fun rm_status ->
|
State.Operation.mark_invalid s.net (fst @@ operation s "PP") [] >>= fun rm_status ->
|
||||||
Assert.is_false ~msg:__LOC__ rm_status ;
|
Assert.is_false ~msg:__LOC__ rm_status ;
|
||||||
State.Net.Mempool.get s.net >>= fun mempool ->
|
State.Operation.list_pending s.net >>= fun mempool ->
|
||||||
compare s "B6.remove" mempool
|
compare s "B6.remove" mempool
|
||||||
["A4" ; "A5" ; "A6" ; "A7" ; "A8" ;
|
["A4" ; "A5" ; "A6" ; "A7" ; "A8" ;
|
||||||
"B7" ; "B8" ] ;
|
"B7" ; "B8" ] ;
|
||||||
save_reload s >>=? fun s ->
|
return ()
|
||||||
State.Net.Mempool.get s.net >>= fun mempool ->
|
|
||||||
compare s "B6.saved" mempool
|
|
||||||
["A4" ; "A5" ; "A6" ; "A7" ; "A8" ;
|
|
||||||
"B7" ; "B8" ] ;
|
|
||||||
State.Net.Mempool.for_block s.net (vblock s "A4") >>= fun mempool ->
|
|
||||||
compare s "A4.for_block" mempool
|
|
||||||
["A5" ; "A6" ; "A7" ; "A8" ;
|
|
||||||
"B1" ; "B2" ; "B3" ; "B4" ; "B5" ; "B6" ; "B7" ; "B8" ] ;
|
|
||||||
return s
|
|
||||||
|
|
||||||
(****************************************************************************)
|
(****************************************************************************)
|
||||||
|
|
||||||
|
|
||||||
let tests : (string * (state -> state tzresult Lwt.t)) list = [
|
let tests : (string * (state -> unit tzresult Lwt.t)) list = [
|
||||||
"init", test_init ;
|
"init", test_init ;
|
||||||
"read_operation", test_read_operation;
|
"read_operation", test_read_operation;
|
||||||
"read_block", test_read_block ;
|
"read_block", test_read_block ;
|
||||||
|
@ -7,6 +7,7 @@
|
|||||||
(* *)
|
(* *)
|
||||||
(**************************************************************************)
|
(**************************************************************************)
|
||||||
|
|
||||||
|
open Error_monad
|
||||||
open Hash
|
open Hash
|
||||||
open Store
|
open Store
|
||||||
|
|
||||||
@ -28,7 +29,7 @@ let genesis_time =
|
|||||||
Time.of_seconds 0L
|
Time.of_seconds 0L
|
||||||
|
|
||||||
let genesis = {
|
let genesis = {
|
||||||
Store.time = genesis_time ;
|
State.Net.time = genesis_time ;
|
||||||
block = genesis_block ;
|
block = genesis_block ;
|
||||||
protocol = genesis_protocol ;
|
protocol = genesis_protocol ;
|
||||||
}
|
}
|
||||||
@ -37,15 +38,28 @@ let genesis = {
|
|||||||
|
|
||||||
let wrap_store_init f base_dir =
|
let wrap_store_init f base_dir =
|
||||||
let root = base_dir // "store" in
|
let root = base_dir // "store" in
|
||||||
Store.init root >>= fun store ->
|
Store.init root >>= function
|
||||||
f store
|
| Ok store -> f store
|
||||||
|
| Error err ->
|
||||||
|
Format.kasprintf Pervasives.failwith
|
||||||
|
"@[Cannot initialize store:@ %a@]" pp_print_error err
|
||||||
|
|
||||||
|
let wrap_raw_store_init f base_dir =
|
||||||
|
let root = base_dir // "store" in
|
||||||
|
Raw_store.init root >>= function
|
||||||
|
| Ok store -> f store
|
||||||
|
| Error err ->
|
||||||
|
Format.kasprintf Pervasives.failwith
|
||||||
|
"@[Cannot initialize store:@ %a@]" pp_print_error err
|
||||||
|
|
||||||
let test_init _ = Lwt.return_unit
|
let test_init _ = Lwt.return_unit
|
||||||
|
|
||||||
|
let net_id = State.Net_id.Id genesis_block
|
||||||
|
|
||||||
(** Operation store *)
|
(** Operation store *)
|
||||||
|
|
||||||
let make proto : Store.operation =
|
let make proto : Store.Operation.t =
|
||||||
{ shell = { net_id = Net genesis_block } ; proto }
|
{ shell = { net_id } ; proto }
|
||||||
|
|
||||||
let op1 = make (MBytes.of_string "Capadoce")
|
let op1 = make (MBytes.of_string "Capadoce")
|
||||||
let oph1 = Operation.hash op1
|
let oph1 = Operation.hash op1
|
||||||
@ -53,51 +67,48 @@ let op2 = make (MBytes.of_string "Kivu")
|
|||||||
let oph2 = Operation.hash op2
|
let oph2 = Operation.hash op2
|
||||||
|
|
||||||
let check_operation s h b =
|
let check_operation s h b =
|
||||||
Operation.get s h >>= function
|
Operation.Contents.read (s, h) >>= function
|
||||||
| Some { Time.data = Ok b' } when Operation.equal b b' -> Lwt.return_unit
|
| Ok b' when Operation.equal b b' -> Lwt.return_unit
|
||||||
| _ ->
|
| _ ->
|
||||||
Printf.eprintf "Error while reading operation %s\n%!"
|
Printf.eprintf "Error while reading operation %s\n%!"
|
||||||
(Operation_hash.to_hex h);
|
(Operation_hash.to_hex h);
|
||||||
exit 1
|
exit 1
|
||||||
|
|
||||||
let test_operation s =
|
let test_operation s =
|
||||||
Persist.use s.operation (fun s ->
|
let s = Store.Net.get s net_id in
|
||||||
Operation.set s oph1 (Time.make_timed (Ok op1)) >>= fun () ->
|
let s = Store.Operation.get s in
|
||||||
Operation.set s oph2 (Time.make_timed (Ok op2)) >>= fun () ->
|
Operation.Contents.store (s, oph1) op1 >>= fun () ->
|
||||||
check_operation s oph1 op1 >>= fun () ->
|
Operation.Contents.store (s, oph2) op2 >>= fun () ->
|
||||||
check_operation s oph2 op2)
|
check_operation s oph1 op1 >>= fun () ->
|
||||||
|
check_operation s oph2 op2
|
||||||
|
|
||||||
 (** Block store *)
 
 let lolblock ?(operations = []) header =
-  { Time.time = Time.of_seconds (Random.int64 1500L) ;
-    data =
-      { shell =
-          { timestamp = Time.of_seconds (Random.int64 1500L) ;
-            net_id = Store.Net genesis_block ;
-            predecessor = genesis_block ; operations;
-            fitness = [MBytes.of_string @@ string_of_int @@ String.length header;
-                       MBytes.of_string @@ string_of_int @@ 12] } ;
-        proto = MBytes.of_string header ;
-      } ;
+  { Store.Block_header.shell =
+      { timestamp = Time.of_seconds (Random.int64 1500L) ;
+        net_id ;
+        predecessor = genesis_block ; operations;
+        fitness = [MBytes.of_string @@ string_of_int @@ String.length header;
+                   MBytes.of_string @@ string_of_int @@ 12] } ;
+    proto = MBytes.of_string header ;
   }
 
 let b1 = lolblock "Blop !"
-let bh1 = Store.Block.hash b1.data
+let bh1 = Store.Block_header.hash b1
 let b2 = lolblock "Tacatlopo"
-let bh2 = Store.Block.hash b2.data
+let bh2 = Store.Block_header.hash b2
 let b3 = lolblock ~operations:[oph1;oph2] "Persil"
-let bh3 = Store.Block.hash b3.data
+let bh3 = Store.Block_header.hash b3
 let bh3' =
   let raw = Bytes.of_string @@ Block_hash.to_string bh3 in
   Bytes.set raw 31 '\000' ;
   Bytes.set raw 30 '\000' ;
-  Block_hash.of_string @@ Bytes.to_string raw
+  Block_hash.of_string_exn @@ Bytes.to_string raw
 
 let check_block s h b =
-  Block.full_get s h >>= function
-  | Some b' when Store.Block.equal b.Time.data b'.Time.data
-              && Time.equal b.time b'.time -> Lwt.return_unit
+  Block_header.Contents.read_opt (s, h) >>= function
+  | Some b' when Store.Block_header.equal b b' -> Lwt.return_unit
   | Some b' ->
       Printf.eprintf "Error while reading block %s\n%!" (Block_hash.to_hex h);
       exit 1
@@ -106,163 +117,319 @@ let check_block s h b =
        (Block_hash.to_hex h);
      exit 1
 
-let test_block (s: Store.store) =
-  Persist.use s.block (fun s ->
-      Block.full_set s bh1 b1 >>= fun () ->
-      Block.full_set s bh2 b2 >>= fun () ->
-      Block.full_set s bh3 b3 >>= fun () ->
-      check_block s bh1 b1 >>= fun () ->
-      check_block s bh2 b2 >>= fun () ->
-      check_block s bh3 b3)
+let test_block s =
+  let s = Store.Net.get s net_id in
+  let s = Store.Block_header.get s in
+  Block_header.Contents.store (s, bh1) b1 >>= fun () ->
+  Block_header.Contents.store (s, bh2) b2 >>= fun () ->
+  Block_header.Contents.store (s, bh3) b3 >>= fun () ->
+  check_block s bh1 b1 >>= fun () ->
+  check_block s bh2 b2 >>= fun () ->
+  check_block s bh3 b3
 
-let test_expand (s: Store.store) =
-  Persist.use s.block (fun s ->
-      Block.full_set s bh1 b1 >>= fun () ->
-      Block.full_set s bh2 b2 >>= fun () ->
-      Block.full_set s bh3 b3 >>= fun () ->
-      Block.full_set s bh3' b3 >>= fun () ->
-      Base58.complete (Block_hash.to_short_b58check bh1) >>= fun res ->
-      Assert.equal_string_list ~msg:__LOC__ res [Block_hash.to_b58check bh1] ;
-      Base58.complete (Block_hash.to_short_b58check bh2) >>= fun res ->
-      Assert.equal_string_list ~msg:__LOC__ res [Block_hash.to_b58check bh2] ;
-      Base58.complete (Block_hash.to_short_b58check bh3) >>= fun res ->
-      Assert.equal_string_list ~msg:__LOC__ res
-        [Block_hash.to_b58check bh3' ; Block_hash.to_b58check bh3] ;
-      Lwt.return_unit)
+let test_expand s =
+  let s = Store.Net.get s net_id in
+  let s = Store.Block_header.get s in
+  Block_header.Contents.store (s, bh1) b1 >>= fun () ->
+  Block_header.Contents.store (s, bh2) b2 >>= fun () ->
+  Block_header.Contents.store (s, bh3) b3 >>= fun () ->
+  Block_header.Contents.store (s, bh3') b3 >>= fun () ->
+  Base58.complete (Block_hash.to_short_b58check bh1) >>= fun res ->
+  Assert.equal_string_list ~msg:__LOC__ res [Block_hash.to_b58check bh1] ;
+  Base58.complete (Block_hash.to_short_b58check bh2) >>= fun res ->
+  Assert.equal_string_list ~msg:__LOC__ res [Block_hash.to_b58check bh2] ;
+  Base58.complete (Block_hash.to_short_b58check bh3) >>= fun res ->
+  Assert.equal_string_list ~msg:__LOC__ res [Block_hash.to_b58check bh3] ;
+  Lwt.return_unit
 
 
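Reading note on `test_expand`: `bh3'` copies `bh3` with its last two bytes zeroed, so the two hashes share a short b58check prefix. The old test expected `Base58.complete` to return both; the new expectation lists only `bh3`, and the "expand" entry is commented out of the test list further down with a FIXME. A minimal resolution helper, illustrative only:

(* Illustrative only: resolve a short b58check prefix back to the known hashes. *)
let resolve_short bh =
  Base58.complete (Block_hash.to_short_b58check bh)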
 (** Generic store *)
 
-let check s k d =
-  get s k >|= fun d' ->
+let check (type t)
+    (module Store: Store_sigs.STORE with type t = t) (s: Store.t) k d =
+  Store.read_opt s k >|= fun d' ->
   if d' <> Some d then begin
     Assert.fail_msg
       "Error while reading key %S\n%!" (String.concat Filename.dir_sep k) ;
   end
 
-let check_none s k =
-  get s k >|= function
+let check_none (type t)
+    (module Store: Store_sigs.STORE with type t = t) (s: Store.t) k =
+  Store.read_opt s k >|= function
   | None -> ()
   | Some _ ->
       Assert.fail_msg
         "Error while reading non-existent key %S\n%!"
         (String.concat Filename.dir_sep k)
 
-let test_generic (s: Store.store) =
-  Persist.use s.global_store (fun s ->
-      set s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () ->
-      set s ["day";"next"] (MBytes.of_string "Jeudi") >>= fun () ->
-      set s ["day";"truc";"chose"] (MBytes.of_string "Vendredi") >>= fun () ->
-      check s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () ->
-      check s ["day";"next"] (MBytes.of_string "Jeudi") >>= fun () ->
-      check_none s ["day"])
+let test_generic (type t)
+    (module Store: Store_sigs.STORE with type t = t) (s: Store.t) =
+  Store.store s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () ->
+  Store.store s ["day";"next"] (MBytes.of_string "Jeudi") >>= fun () ->
+  Store.store s ["day";"truc";"chose"] (MBytes.of_string "Vendredi") >>= fun () ->
+  check (module Store) s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () ->
+  check (module Store) s ["day";"next"] (MBytes.of_string "Jeudi") >>= fun () ->
+  check_none (module Store) s ["day"]
 
-let test_generic_list (s: Store.store) =
-  Persist.use s.global_store (fun s ->
-      set s ["a"; "b"] (MBytes.of_string "Novembre") >>= fun () ->
-      set s ["a"; "c"] (MBytes.of_string "Juin") >>= fun () ->
-      set s ["a"; "d"; "e"] (MBytes.of_string "Septembre") >>= fun () ->
-      set s ["f";] (MBytes.of_string "Avril") >>= fun () ->
-      set s ["g"; "h"] (MBytes.of_string "Avril") >>= fun () ->
-      list s [] >>= fun l ->
-      Assert.equal_persist_list ~msg:__LOC__ [] l ;
-      list s [[]] >>= fun l ->
-      Assert.equal_persist_list
-        ~msg:__LOC__ [["a"];["f"];["g"];["version"]] l ;
-      list s [["a"]] >>= fun l ->
-      Assert.equal_persist_list
-        ~msg:__LOC__ [["a";"b"]; ["a";"c"]; ["a";"d"]] l ;
-      list s [["f"]] >>= fun l ->
-      Assert.equal_persist_list ~msg:__LOC__ [] l ;
-      list s [["g"]] >>= fun l ->
-      Assert.equal_persist_list ~msg:__LOC__ [["g";"h"]] l ;
-      list s [["i"]] >>= fun l ->
-      Assert.equal_persist_list ~msg:__LOC__ [] l ;
-      list s [["a"];["g"]] >>= fun l ->
-      Assert.equal_persist_list ~msg:__LOC__
-        [["a"; "b"]; ["a"; "c"]; ["a"; "d"]; ["g"; "h"]] l ;
-      Lwt.return_unit)
+let list (type t)
+    (module Store: Store_sigs.STORE with type t = t) (s: Store.t) k =
+  Store.fold_keys s k ~init:[] ~f:(fun k acc -> Lwt.return (k :: acc))
+
+let test_generic_list (type t)
+    (module Store: Store_sigs.STORE with type t = t) (s: Store.t) =
+  Store.store s ["a"; "b"] (MBytes.of_string "Novembre") >>= fun () ->
+  Store.store s ["a"; "c"] (MBytes.of_string "Juin") >>= fun () ->
+  Store.store s ["a"; "d"; "e"] (MBytes.of_string "Septembre") >>= fun () ->
+  Store.store s ["f";] (MBytes.of_string "Avril") >>= fun () ->
+  Store.store s ["g"; "h"] (MBytes.of_string "Avril") >>= fun () ->
+  list (module Store) s [] >>= fun l ->
+  Assert.equal_persist_list ~msg:__LOC__
+    [["a";"b"];["a";"c"];["a";"d";"e"];["f"];["g";"h"]]
+    (List.sort compare l) ;
+  list (module Store) s ["a"] >>= fun l ->
+  Assert.equal_persist_list
+    ~msg:__LOC__ [["a";"b"]; ["a";"c"]; ["a";"d";"e"]]
+    (List.sort compare l) ;
+  list (module Store) s ["f"] >>= fun l ->
+  Assert.equal_persist_list ~msg:__LOC__ [] l ;
+  list (module Store) s ["g"] >>= fun l ->
+  Assert.equal_persist_list ~msg:__LOC__ [["g";"h"]] (List.sort compare l) ;
+  list (module Store) s ["i"] >>= fun l ->
+  Assert.equal_persist_list ~msg:__LOC__ [] l ;
+  Lwt.return_unit
 
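The generic tests above are parameterized over the store implementation with a locally abstract type and a first-class module, which is what lets the same body run against `Raw_store`, sub-stores and indexed stores later in the file. A reduced stand-in for `Store_sigs.STORE`, with signatures inferred from the usage above rather than copied from the interface:

module type MINIMAL_STORE = sig
  type t
  val store : t -> string list -> MBytes.t -> unit Lwt.t
  val read_opt : t -> string list -> MBytes.t option Lwt.t
end

(* Same idiom as [check] / [test_generic]: one test body, any implementation. *)
let roundtrip (type t)
    (module S : MINIMAL_STORE with type t = t) (s : t) =
  S.store s ["key"] (MBytes.of_string "value") >>= fun () ->
  S.read_opt s ["key"]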
 (** HashSet *)
 
-let test_hashset (s: Store.store) =
-  let module BlockSet = Hash_set(Block_hash) in
+open Store_helpers
+
+let test_hashset (type t)
+    (module Store: Store_sigs.STORE with type t = t) (s: Store.t) =
+  let module BlockSet = Block_hash.Set in
   let module StoreSet =
-    Persist.MakeBufferedPersistentSet
-      (Store.Faked_functional_store)
-      (struct
-        include Block_hash
-        let prefix = [ "test_set" ]
-        let length = path_len
-      end)(BlockSet) in
+    Make_buffered_set
+      (Make_substore(Store)(struct let name = ["test_set"] end))
+      (Block_hash)
+      (BlockSet) in
   let open BlockSet in
-  let eq = BlockSet.equal in
   let bhset : BlockSet.t = BlockSet.add bh2 (BlockSet.add bh1 BlockSet.empty) in
-  Persist.use s.global_store (fun s ->
-      StoreSet.write s bhset >>= fun s ->
-      StoreSet.read s >>= fun bhset' ->
-      Assert.equal_block_map ~msg:__LOC__ ~eq bhset bhset' ;
-      let bhset2 =
-        Pervasives.(bhset |> BlockSet.add bh3 |> BlockSet.remove bh1) in
-      StoreSet.write s bhset2 >>= fun s ->
-      StoreSet.read s >>= fun bhset2' ->
-      Assert.equal_block_map ~msg:__LOC__ ~eq bhset2 bhset2' ;
-      StoreSet.fold s BlockSet.empty
-        (fun bh acc -> Lwt.return (BlockSet.add bh acc)) >>= fun bhset2'' ->
-      Assert.equal_block_map ~msg:__LOC__ ~eq bhset2 bhset2'' ;
-      set s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () ->
-      StoreSet.clear s >>= fun s ->
-      StoreSet.read s >>= fun empty ->
-      Assert.equal_block_map ~msg:__LOC__ ~eq BlockSet.empty empty ;
-      check s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () ->
-      Lwt.return_unit)
+  StoreSet.store_all s bhset >>= fun () ->
+  StoreSet.read_all s >>= fun bhset' ->
+  Assert.equal_block_set ~msg:__LOC__ bhset bhset' ;
+  let bhset2 =
+    Pervasives.(bhset |> BlockSet.add bh3 |> BlockSet.remove bh1) in
+  StoreSet.store_all s bhset2 >>= fun () ->
+  StoreSet.read_all s >>= fun bhset2' ->
+  Assert.equal_block_set ~msg:__LOC__ bhset2 bhset2' ;
+  StoreSet.fold s BlockSet.empty
+    (fun bh acc -> Lwt.return (BlockSet.add bh acc)) >>= fun bhset2'' ->
+  Assert.equal_block_set ~msg:__LOC__ bhset2 bhset2'' ;
+  Store.store s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () ->
+  StoreSet.remove_all s >>= fun () ->
+  StoreSet.read_all s >>= fun empty ->
+  Assert.equal_block_set ~msg:__LOC__ BlockSet.empty empty ;
+  check (module Store) s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () ->
+  Lwt.return_unit
 
 
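The `Make_buffered_set` instantiation above relies on `Make_substore` to namespace its keys, which replaces the explicit `prefix`/`length` plumbing of the old `MakeBufferedPersistentSet`. An equivalent top-level instantiation against `Raw_store` would presumably look like this (illustrative only, not in the commit; a round-trip is then `store_all` followed by `read_all`):

(* Hypothetical top-level counterpart of the functor application above. *)
module Example_set =
  Make_buffered_set
    (Make_substore(Raw_store)(struct let name = ["example_set"] end))
    (Block_hash)
    (Block_hash.Set)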
 (** HashMap *)
 
-let test_hashmap (s: Store.store) =
-  let module BlockMap = Hash_map(Block_hash) in
+let test_hashmap (type t)
+    (module Store: Store_sigs.STORE with type t = t) (s: Store.t) =
+  let module BlockMap = Block_hash.Map in
   let module StoreMap =
-    Persist.MakeBufferedPersistentTypedMap
-      (Store.Faked_functional_store)
-      (struct
-        include Block_hash
-        let prefix = [ "test_map" ]
-        let length = path_len
-      end)
-      (struct
-        type value = int * char
-        let encoding =
-          Data_encoding.(tup2 int31 (conv int_of_char char_of_int int8))
-      end)
+    Make_buffered_map
+      (Make_substore(Store)(struct let name = ["test_map"] end))
+      (Block_hash)
+      (Make_value(struct
+         type t = int * char
+         let encoding =
+           Data_encoding.(tup2 int31 (conv int_of_char char_of_int int8))
+       end))
       (BlockMap) in
-  let eq = BlockMap.equal (=) in
+  let eq = (=) in
   let map =
     Pervasives.(BlockMap.empty |>
                 BlockMap.add bh1 (1, 'a') |> BlockMap.add bh2 (2, 'b')) in
-  Persist.use s.global_store (fun s ->
-      StoreMap.write s map >>= fun s ->
-      StoreMap.read s >>= fun map' ->
-      Assert.equal_block_map ~msg:__LOC__ ~eq map map' ;
-      let map2 =
-        Pervasives.(map |> BlockMap.add bh3 (3, 'c') |> BlockMap.remove bh1) in
-      StoreMap.write s map2 >>= fun s ->
-      StoreMap.read s >>= fun map2' ->
-      Assert.equal_block_map ~msg:__LOC__ ~eq map2 map2' ;
-      Lwt.return_unit)
+  StoreMap.store_all s map >>= fun () ->
+  StoreMap.read_all s >>= fun map' ->
+  Assert.equal_block_map ~msg:__LOC__ ~eq map map' ;
+  let map2 =
+    Pervasives.(map |> BlockMap.add bh3 (3, 'c') |> BlockMap.remove bh1) in
+  StoreMap.store_all s map2 >>= fun () ->
+  StoreMap.read_all s >>= fun map2' ->
+  Assert.equal_block_map ~msg:__LOC__ ~eq map2 map2' ;
+  Lwt.return_unit
+(** Functors *)
+
+let test_single (type t)
+    (module Store: Store_sigs.STORE with type t = t) (s: Store.t) =
+  let module Single =
+    Make_single_store
+      (Store)
+      (struct let name = ["plop"] end)
+      (Make_value(struct
+         type t = int * string
+         let encoding = Data_encoding.(tup2 int31 string)
+       end)) in
+  Single.known s >>= fun known ->
+  Assert.is_false ~msg:__LOC__ known ;
+  Single.read_opt s >>= fun v' ->
+  Assert.equal ~msg:__LOC__ None v' ;
+  let v = (3, "Non!") in
+  Single.store s v >>= fun () ->
+  Single.known s >>= fun known ->
+  Assert.is_true ~msg:__LOC__ known ;
+  Single.read_opt s >>= fun v' ->
+  Assert.equal ~msg:__LOC__ (Some v) v' ;
+  Single.remove s >>= fun v' ->
+  Single.known s >>= fun known ->
+  Assert.is_false ~msg:__LOC__ known ;
+  Single.read_opt s >>= fun v' ->
+  Assert.equal ~msg:__LOC__ None v' ;
+  Lwt.return_unit
+
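`Make_single_store` pairs a fixed key (`["plop"]`) with a value description; the `Make_value` argument is nothing more than a type together with its Data_encoding. Written out on its own for clarity (illustration only, the name is not in the commit):

(* The value module used by [Single] above, as a standalone definition. *)
module Int_string_value = struct
  type t = int * string
  let encoding = Data_encoding.(tup2 int31 string)
end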
+module Sub =
+  Make_substore(Raw_store)(struct let name = ["plop";"plip"] end)
+
+module SubBlocks =
+  Make_indexed_substore
+    (Make_substore(Raw_store)(struct let name = ["blocks"] end))
+    (Block_hash)
+
+module SubBlocksSet =
+  SubBlocks.Make_buffered_set
+    (struct let name = ["test_set"] end)
+    (Block_hash.Set)
+
+module SubBlocksMap =
+  SubBlocks.Make_buffered_map
+    (struct let name = ["test_map"] end)
+    (Make_value(struct
+       type t = int * string
+       let encoding = Data_encoding.(tup2 int31 string)
+     end))
+    (Block_hash.Map)
+
+let test_subblock s =
+  SubBlocksSet.known s bh1 >>= fun known ->
+  Assert.is_false ~msg:__LOC__ known ;
+  SubBlocksSet.store s bh1 >>= fun () ->
+  SubBlocksSet.store s bh2 >>= fun () ->
+  SubBlocksSet.known s bh2 >>= fun known ->
+  Assert.is_true ~msg:__LOC__ known ;
+  SubBlocksSet.read_all s >>= fun set ->
+  let set' =
+    Block_hash.Set.empty
+    |> Block_hash.Set.add bh1
+    |> Block_hash.Set.add bh2 in
+  Assert.equal_block_set ~msg:__LOC__ set set' ;
+  SubBlocksSet.remove s bh2 >>= fun () ->
+  let set =
+    Block_hash.Set.empty
+    |> Block_hash.Set.add bh3'
+    |> Block_hash.Set.add bh3 in
+  SubBlocksSet.store_all s set >>= fun () ->
+  SubBlocksSet.elements s >>= fun elts ->
+  Assert.equal_block_hash_list ~msg:__LOC__
+    (List.sort Block_hash.compare elts)
+    (List.sort Block_hash.compare [bh3 ; bh3']) ;
+  SubBlocksSet.store s bh2 >>= fun () ->
+  SubBlocksSet.remove s bh3 >>= fun () ->
+  SubBlocksSet.elements s >>= fun elts ->
+  Assert.equal_block_hash_list ~msg:__LOC__
+    (List.sort Block_hash.compare elts)
+    (List.sort Block_hash.compare [bh2 ; bh3']) ;
+  SubBlocksMap.known s bh1 >>= fun known ->
+  Assert.is_false ~msg:__LOC__ known ;
+  let v1 = (3, "Non!")
+  and v2 = (12, "Beurk.") in
+  SubBlocksMap.store s bh1 v1 >>= fun () ->
+  SubBlocksMap.store s bh2 v2 >>= fun () ->
+  SubBlocksMap.read_opt s bh1 >>= fun v1' ->
+  SubBlocksMap.known s bh1 >>= fun known ->
+  Assert.is_true ~msg:__LOC__ known ;
+  let map =
+    Block_hash.Map.empty
+    |> Block_hash.Map.add bh1 v1
+    |> Block_hash.Map.add bh2 v2 in
+  SubBlocksMap.read_all s >>= fun map' ->
+  Assert.equal_block_map ~eq:(=) ~msg:__LOC__ map map' ;
+
+  SubBlocksSet.remove_all s >>= fun () ->
+  SubBlocksSet.elements s >>= fun elts ->
+  Assert.equal_block_hash_list ~msg:__LOC__ elts [] ;
+
+  SubBlocksMap.read_all s >>= fun map' ->
+  Assert.equal_block_map ~eq:(=) ~msg:__LOC__ map map' ;
+
+  SubBlocksSet.store s bh3 >>= fun () ->
+
+  SubBlocks.indexes s >>= fun keys ->
+  Assert.equal_block_hash_list ~msg:__LOC__
+    (List.sort Block_hash.compare keys)
+    (List.sort Block_hash.compare [bh1;bh2;bh3]) ;
+
+  Lwt.return_unit
+
+module SubSubBlocks =
+  Make_indexed_substore
+    (Make_substore(SubBlocks.Store)(struct let name = ["sub_blocks"] end))
+    (Block_hash)
+
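How the indexed sub-stores are addressed is visible in the test lists below: `SubBlocks.Store` values are built as the pair of the parent store and the index key, and the nested `SubSubBlocks.Store` as a pair of pairs. A sketch of that partial application, an assumption drawn from the usage below rather than from the functor's interface:

(* Point the generic test at one block's namespace, then at a nested one. *)
let test_block_namespace (s : Raw_store.t) =
  test_generic (module SubBlocks.Store) (s, bh1) >>= fun () ->
  test_generic (module SubSubBlocks.Store) ((s, bh1), bh2)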
 (** *)
 
-let tests : (string * (store -> unit Lwt.t)) list = [
+let tests_raw : (string * (Raw_store.t -> unit Lwt.t)) list = [
+
   "init", test_init ;
-  "expand", test_expand ;
+
+  "generic", test_generic (module Raw_store) ;
+  "generic_substore", test_generic (module Sub) ;
+  "generic_indexedstore",
+  (fun s -> test_generic (module SubBlocks.Store) (s, bh1)) ;
+  "generic_indexedsubstore",
+  (fun s -> test_generic (module SubSubBlocks.Store) ((s, bh1), bh2)) ;
+
+  "single", test_single (module Raw_store) ;
+  "single_substore", test_single (module Sub) ;
+  "single_indexedstore",
+  (fun s -> test_single (module SubBlocks.Store) (s, bh1)) ;
+  "single_indexedsubstore",
+  (fun s -> test_single (module SubSubBlocks.Store) ((s, bh1), bh2)) ;
+
+  "generic_list", test_generic_list (module Raw_store);
+  "generic_substore_list", test_generic_list (module Sub);
+  "generic_indexedstore_list",
+  (fun s -> test_generic_list (module SubBlocks.Store) (s, bh1));
+  "generic_indexedsubstore_list",
+  (fun s -> test_generic_list (module SubSubBlocks.Store) ((s, bh1), bh2)) ;
+
+  "hashset", test_hashset (module Raw_store) ;
+  "hashset_substore", test_hashset (module Sub) ;
+  "hashset_indexedstore",
+  (fun s -> test_hashset (module SubBlocks.Store) (s, bh1));
+  "hashset_indexedsubstore",
+  (fun s -> test_hashset (module SubSubBlocks.Store) ((s, bh1), bh2)) ;
+
+  "hashmap", test_hashmap (module Raw_store) ;
+  "hashmap_substore", test_hashmap (module Sub) ;
+  "hashmap_indexedstore",
+  (fun s -> test_hashmap (module SubBlocks.Store) (s, bh1));
+  "hashmap_indexedsubstore",
+  (fun s -> test_hashmap (module SubSubBlocks.Store) ((s, bh1), bh2)) ;
+
+  "subblock", test_subblock ;
+
+]
+
+
+let tests : (string * (Store.t -> unit Lwt.t)) list = [
+  (* "expand", test_expand ; *) (* FIXME GRGR *)
   "operation", test_operation ;
   "block", test_block ;
-  "generic", test_generic ;
-  "generic_list", test_generic_list ;
-  "hashset", test_hashset ;
-  "hashmap", test_hashmap ;
 ]
 
 let () =
-  Test.run "store." (List.map (fun (s, f) -> s, wrap_store_init f) tests)
+  Test.run "store."
+    (List.map (fun (s, f) -> s, wrap_raw_store_init f) tests_raw @
+     List.map (fun (s, f) -> s, wrap_store_init f) tests)