Merge branch 'merkle'
commit c2bf738079

.gitignore (vendored)
@@ -42,6 +42,7 @@
/test/test-context
/test/test-basic
/test/test-data-encoding
/test/test-merkle
/test/test-p2p-io-scheduler
/test/test-p2p-connection
/test/test-p2p-connection-pool

@@ -86,6 +86,15 @@ test:data-encoding:
dependencies:
- build

test:merkle:
stage: test
tags:
- tezos_builder
script:
- make -C test run-test-merkle
dependencies:
- build

test:p2p-io-scheduler:
stage: test
tags:
@@ -135,12 +135,13 @@ let forge_block cctxt ?net ?predecessor ?timestamp fitness ops header =
(net, predecessor, timestamp, fitness, ops, header)
let validate_block cctxt net block =
call_service0 cctxt Services.validate_block (net, block)
let inject_block cctxt ?(wait = true) ?force block =
call_service0 cctxt Services.inject_block (block, wait, force)
let inject_operation cctxt ?(wait = true) ?force operation =
call_service0 cctxt Services.inject_operation (operation, wait, force)
let inject_protocol cctxt ?(wait = true) ?force protocol =
call_service0 cctxt Services.inject_protocol (protocol, wait, force)
let inject_block cctxt ?(async = false) ?(force = false) raw operations =
call_service0 cctxt Services.inject_block
{ raw ; blocking = not async ; force ; operations }
let inject_operation cctxt ?(async = false) ?force operation =
call_service0 cctxt Services.inject_operation (operation, not async, force)
let inject_protocol cctxt ?(async = false) ?force protocol =
call_service0 cctxt Services.inject_protocol (protocol, not async, force)
let bootstrapped cctxt =
call_streamed_service0 cctxt Services.bootstrapped ()
let complete cctxt ?block prefix =

@@ -163,7 +164,8 @@ module Blocks = struct
fitness: MBytes.t list ;
timestamp: Time.t ;
protocol: Protocol_hash.t option ;
operations: Operation_hash.t list option ;
operations_hash: Operation_list_list_hash.t ;
operations: Operation_hash.t list list option ;
data: MBytes.t option ;
net: Updater.Net_id.t ;
test_protocol: Protocol_hash.t option ;
@@ -17,7 +17,7 @@ val forge_block:
?predecessor:Block_hash.t ->
?timestamp:Time.t ->
Fitness.fitness ->
Operation_hash.t list ->
Operation_list_list_hash.t ->
MBytes.t ->
MBytes.t Lwt.t
(** [forge_block cctxt ?net ?predecessor ?timestamp fitness ops

@@ -33,24 +33,24 @@ val validate_block:

val inject_block:
Client_commands.context ->
?wait:bool -> ?force:bool ->
MBytes.t ->
?async:bool -> ?force:bool ->
MBytes.t -> Operation_hash.t list list ->
Block_hash.t tzresult Lwt.t
(** [inject_block cctxt ?wait ?force raw_block] tries to inject
[raw_block] inside the node. If [?wait] is [true], [raw_block]
(** [inject_block cctxt ?async ?force raw_block] tries to inject
[raw_block] inside the node. If [?async] is [true], [raw_block]
will be validated before the result is returned. If [?force] is
true, the block will be injected even on non strictly increasing
fitness. *)

val inject_operation:
Client_commands.context ->
?wait:bool -> ?force:bool ->
?async:bool -> ?force:bool ->
MBytes.t ->
Operation_hash.t tzresult Lwt.t

val inject_protocol:
Client_commands.context ->
?wait:bool -> ?force:bool ->
?async:bool -> ?force:bool ->
Tezos_compiler.Protocol.t ->
Protocol_hash.t tzresult Lwt.t
@@ -83,7 +83,7 @@ module Blocks : sig
block -> MBytes.t list Lwt.t
val operations:
Client_commands.context ->
block -> Operation_hash.t list Lwt.t
block -> Operation_hash.t list list Lwt.t
val protocol:
Client_commands.context ->
block -> Protocol_hash.t Lwt.t

@@ -104,7 +104,8 @@ module Blocks : sig
fitness: MBytes.t list ;
timestamp: Time.t ;
protocol: Protocol_hash.t option ;
operations: Operation_hash.t list option ;
operations_hash: Operation_list_list_hash.t ;
operations: Operation_hash.t list list option ;
data: MBytes.t option ;
net: Updater.Net_id.t ;
test_protocol: Protocol_hash.t option ;

@@ -146,7 +147,7 @@ module Operations : sig
val monitor:
Client_commands.context ->
?contents:bool -> unit ->
(Operation_hash.t * Store.Operation.t option) list Lwt_stream.t Lwt.t
(Operation_hash.t * Store.Operation.t option) list list Lwt_stream.t Lwt.t
end

module Protocols : sig
@@ -116,7 +116,7 @@ let get_signing_slots cctxt ?max_priority block delegate level =
return slots

let inject_endorsement cctxt
block level ?wait ?force
block level ?async ?force
src_sk source slot =
Client_blocks.get_block_hash cctxt block >>= fun block_hash ->
Client_node_rpcs.Blocks.net cctxt block >>= fun net ->

@@ -129,7 +129,7 @@ let inject_endorsement cctxt
() >>=? fun bytes ->
let signed_bytes = Ed25519.Signature.append src_sk bytes in
Client_node_rpcs.inject_operation
cctxt ?force ?wait signed_bytes >>=? fun oph ->
cctxt ?force ?async signed_bytes >>=? fun oph ->
State.record_endorsement cctxt level block_hash slot oph >>=? fun () ->
return oph

@@ -173,7 +173,7 @@ let forge_endorsement cctxt
else check_endorsement cctxt level slot
end >>=? fun () ->
inject_endorsement cctxt
block level ~wait:true ~force
block level ~force
src_sk src_pk slot

@@ -316,7 +316,7 @@ let endorse cctxt state =
lwt_debug "Endorsing %a for %s (slot %d)!"
Block_hash.pp_short hash name slot >>= fun () ->
inject_endorsement cctxt
b level ~wait:false ~force:true
b level ~async:true ~force:true
sk pk slot >>=? fun oph ->
cctxt.message
"Injected endorsement for block '%a' \
@@ -40,11 +40,14 @@ let rec compute_stamp
let inject_block cctxt block
?force
~priority ~timestamp ~fitness ~seed_nonce
~src_sk operations =
~src_sk operation_list =
let block = match block with `Prevalidation -> `Head 0 | block -> block in
Client_node_rpcs.Blocks.info cctxt block >>= fun bi ->
let seed_nonce_hash = Nonce.hash seed_nonce in
Client_proto_rpcs.Context.next_level cctxt block >>=? fun level ->
let operations =
Operation_list_list_hash.compute
(List.map Operation_list_hash.compute operation_list) in
let shell =
{ Store.Block_header.net_id = bi.net ; predecessor = bi.hash ;
timestamp ; fitness ; operations } in

@@ -65,7 +68,7 @@ let inject_block cctxt block
() >>=? fun unsigned_header ->
let signed_header = Ed25519.Signature.append src_sk unsigned_header in
Client_node_rpcs.inject_block cctxt
~wait:true ?force signed_header >>=? fun block_hash ->
?force signed_header operation_list >>=? fun block_hash ->
return block_hash

let forge_block cctxt block

@@ -138,7 +141,8 @@ let forge_block cctxt block
&& Operation_hash.Map.is_empty operations.branch_refused
&& Operation_hash.Map.is_empty operations.branch_delayed ) then
inject_block cctxt ?force ~src_sk
~priority ~timestamp ~fitness ~seed_nonce block operations.applied
~priority ~timestamp ~fitness ~seed_nonce block
[operations.applied]
else
failwith "Cannot (fully) validate the given operations."

@@ -436,8 +440,9 @@ let mine cctxt state =
Fitness.pp fitness >>= fun () ->
let seed_nonce = generate_seed_nonce () in
Client_keys.get_key cctxt delegate >>=? fun (_,_,src_sk) ->
inject_block cctxt ~force:true ~src_sk ~priority ~timestamp ~fitness ~seed_nonce
(`Hash bi.hash) operations.applied
inject_block cctxt
~force:true ~src_sk ~priority ~timestamp ~fitness ~seed_nonce
(`Hash bi.hash) [operations.applied]
|> trace_exn (Failure "Error while injecting block") >>=? fun block_hash ->
State.record_block cctxt level block_hash seed_nonce
|> trace_exn (Failure "Error while recording block") >>=? fun () ->
@@ -22,7 +22,7 @@ val inject_block:
fitness:Fitness.t ->
seed_nonce:Nonce.t ->
src_sk:secret_key ->
Operation_hash.t list ->
Operation_hash.t list list ->
Block_hash.t tzresult Lwt.t
(** [inject_block cctxt blk ?force ~priority ~timestamp ~fitness
~seed_nonce ~src_sk ops] tries to inject a block in the node. If
@@ -34,7 +34,7 @@ let monitor cctxt ?contents ?check () =
"@[<v 2>Error while parsing operations@,%a@["
pp_print_error err >>= fun () ->
Lwt.return None)
ops
(List.concat ops)
in
Lwt.return (Lwt_stream.map_s convert ops_stream)
@@ -11,7 +11,7 @@ open Cli_entries
open Tezos_context
open Logging.Client.Revelation

let inject_seed_nonce_revelation cctxt block ?force ?wait nonces =
let inject_seed_nonce_revelation cctxt block ?force ?async nonces =
let operations =
List.map
(fun (level, nonce) ->

@@ -19,7 +19,7 @@ let inject_seed_nonce_revelation cctxt block ?force ?wait nonces =
Client_node_rpcs.Blocks.net cctxt block >>= fun net ->
Client_proto_rpcs.Helpers.Forge.Anonymous.operations cctxt
block ~net operations >>=? fun bytes ->
Client_node_rpcs.inject_operation cctxt ?force ?wait bytes >>=? fun oph ->
Client_node_rpcs.inject_operation cctxt ?force ?async bytes >>=? fun oph ->
return oph

type Error_monad.error += Bad_revelation

@@ -34,8 +34,7 @@ let forge_seed_nonce_revelation
Block_hash.pp_short hash >>= fun () ->
return ()
| _ ->
inject_seed_nonce_revelation cctxt
block ~force ~wait:true nonces >>=? fun oph ->
inject_seed_nonce_revelation cctxt block ~force nonces >>=? fun oph ->
cctxt.answer
"Operation successfully injected %d revelation(s) for %a."
(List.length nonces)
@@ -11,7 +11,7 @@ val inject_seed_nonce_revelation:
Client_commands.context ->
Client_proto_rpcs.block ->
?force:bool ->
?wait:bool ->
?async:bool ->
(Raw_level.t * Nonce.t) list ->
Operation_hash.t tzresult Lwt.t
@@ -104,7 +104,7 @@ let transfer cctxt
let oph = Operation_hash.hash_bytes [ signed_bytes ] in
Client_proto_rpcs.Helpers.apply_operation cctxt block
predecessor oph bytes (Some signature) >>=? fun contracts ->
Client_node_rpcs.inject_operation cctxt ?force ~wait:true signed_bytes >>=? fun injected_oph ->
Client_node_rpcs.inject_operation cctxt ?force signed_bytes >>=? fun injected_oph ->
assert (Operation_hash.equal oph injected_oph) ;
cctxt.message "Operation successfully injected in the node." >>= fun () ->
cctxt.message "Operation hash is '%a'." Operation_hash.pp oph >>= fun () ->

@@ -121,7 +121,7 @@ let originate cctxt ?force ~block ?signature bytes =
Client_proto_rpcs.Helpers.apply_operation cctxt block
predecessor oph bytes signature >>=? function
| [ contract ] ->
Client_node_rpcs.inject_operation cctxt ?force ~wait:true signed_bytes >>=? fun injected_oph ->
Client_node_rpcs.inject_operation cctxt ?force signed_bytes >>=? fun injected_oph ->
assert (Operation_hash.equal oph injected_oph) ;
cctxt.message "Operation successfully injected in the node." >>= fun () ->
cctxt.message "Operation hash is '%a'." Operation_hash.pp oph >>= fun () ->

@@ -176,7 +176,7 @@ let dictate cctxt block command seckey =
let signature = Ed25519.sign seckey bytes in
let signed_bytes = MBytes.concat bytes signature in
let oph = Operation_hash.hash_bytes [ signed_bytes ] in
Client_node_rpcs.inject_operation cctxt ~wait:true signed_bytes >>=? fun injected_oph ->
Client_node_rpcs.inject_operation cctxt signed_bytes >>=? fun injected_oph ->
assert (Operation_hash.equal oph injected_oph) ;
cctxt.message "Operation successfully injected in the node." >>= fun () ->
cctxt.message "Operation hash is '%a'." Operation_hash.pp oph >>= fun () ->
@@ -314,7 +314,7 @@ module Helpers : sig
predecessor:Block_hash.t ->
timestamp:Time.t ->
fitness:Fitness.t ->
operations:Operation_hash.t list ->
operations:Operation_list_list_hash.t ->
level:Raw_level.t ->
priority:int ->
seed_nonce_hash:Nonce_hash.t ->
@@ -52,8 +52,8 @@ let mine cctxt =
exit 2 in
Client_node_rpcs.forge_block cctxt
~net:bi.net ~predecessor:bi.hash
fitness [] (MBytes.create 0) >>= fun bytes ->
Client_node_rpcs.inject_block cctxt ~wait:true bytes >>=? fun hash ->
fitness Operation_list_list_hash.empty (MBytes.create 0) >>= fun bytes ->
Client_node_rpcs.inject_block cctxt bytes [] >>=? fun hash ->
cctxt.answer "Injected %a" Block_hash.pp_short hash >>= fun () ->
return ()
@@ -29,7 +29,7 @@ let mine cctxt ?timestamp block command fitness seckey =
Client_blocks.get_block_info cctxt block >>= fun bi ->
forge_block cctxt ?timestamp block bi.net command fitness >>= fun blk ->
let signed_blk = Environment.Ed25519.Signature.append seckey blk in
Client_node_rpcs.inject_block cctxt ~wait:true signed_blk >>=? fun hash ->
Client_node_rpcs.inject_block cctxt signed_blk [[]] >>=? fun hash ->
cctxt.answer "Injected %a" Block_hash.pp_short hash >>= fun () ->
return ()

@@ -48,6 +48,7 @@ let commands () =
"Set the timestamp of the block (and initial time of the chain)" ] in
let open Cli_entries in
[

command ~args ~desc: "Activate a protocol" begin
prefixes [ "activate" ; "protocol" ] @@
param ~name:"version" ~desc:"Protocol version (b58check)"

@@ -60,15 +61,15 @@ let commands () =
Client_keys.Secret_key.source_param
~name:"password" ~desc:"Dictator's key" @@
stop
end
(fun hash fitness seckey cctxt ->
end begin fun hash fitness seckey cctxt ->
let timestamp = !timestamp in
let fitness =
Client_embedded_proto_alpha.Fitness_repr.from_int64 fitness in
mine cctxt ?timestamp cctxt.config.block
(Activate hash) fitness seckey >>=
handle_error cctxt)
;
handle_error cctxt
end ;

command ~args ~desc: "Fork a test protocol" begin
prefixes [ "fork" ; "test" ; "protocol" ] @@
param ~name:"version" ~desc:"Protocol version (b58check)"

@@ -80,16 +81,17 @@ let commands () =
prefixes [ "and" ; "key" ] @@
param ~name:"password" ~desc:"Dictator's key"
(fun _ key ->
Lwt.return (Environment.Ed25519.Secret_key.of_b58check key))
Lwt.return (Environment.Ed25519.Secret_key.of_b58check key)) @@
stop
end
(fun hash fitness seckey cctxt ->
end begin fun hash fitness seckey cctxt ->
let timestamp = !timestamp in
let fitness =
Client_embedded_proto_alpha.Fitness_repr.from_int64 fitness in
mine cctxt ?timestamp cctxt.config.block
(Activate_testnet hash) fitness seckey >>=
handle_error cctxt) ;
handle_error cctxt
end ;

]

let () =
@@ -90,6 +90,11 @@ let list_hd_opt = function
| [] -> None
| h :: _ -> Some h

let rec list_last_exn = function
| [] -> raise Not_found
| [x] -> x
| _ :: xs -> list_last_exn xs

let merge_filter_list2
?(finalize = List.rev) ?(compare = compare)
?(f = first_some)
@@ -44,6 +44,7 @@ val list_rev_sub : 'a list -> int -> 'a list
(** [list_sub l n] is l capped to max n elements *)
val list_sub: 'a list -> int -> 'a list
val list_hd_opt: 'a list -> 'a option
val list_last_exn: 'a list -> 'a

(** [merge_filter_list2 ~compare ~f l1 l2] merges two lists ordered by [compare]
and whose items can be merged with [f]. Item is discarded or kept whether
@@ -49,6 +49,20 @@ let read_opt s k =

type error += Unknown of string list

let () =
Error_monad.register_error_kind
`Permanent
~id:"store.unkown_key"
~title:"Unknown key in store"
~description: ""
~pp:(fun ppf key ->
Format.fprintf ppf
"@[<v 2>Unknown key %s@]"
(String.concat "/" key))
Data_encoding.(obj1 (req "key" (list string)))
(function Unknown key -> Some key | _ -> None)
(fun key -> Unknown key)

let read t key =
read_opt t key >>= function
| None -> fail (Unknown key)
@@ -283,23 +283,23 @@ module Block_header = struct
net_id: Net_id.t ;
predecessor: Block_hash.t ;
timestamp: Time.t ;
operations: Operation_list_list_hash.t ;
fitness: MBytes.t list ;
operations: Operation_hash.t list ;
}

let shell_header_encoding =
let open Data_encoding in
conv
(fun { net_id ; predecessor ; timestamp ; fitness ; operations } ->
(net_id, predecessor, timestamp, fitness, operations))
(fun (net_id, predecessor, timestamp, fitness, operations) ->
{ net_id ; predecessor ; timestamp ; fitness ; operations })
(fun { net_id ; predecessor ; timestamp ; operations ; fitness } ->
(net_id, predecessor, timestamp, operations, fitness))
(fun (net_id, predecessor, timestamp, operations, fitness) ->
{ net_id ; predecessor ; timestamp ; operations ; fitness })
(obj5
(req "net_id" Net_id.encoding)
(req "predecessor" Block_hash.encoding)
(req "timestamp" Time.encoding)
(req "fitness" Fitness.encoding)
(req "operations" (list Operation_hash.encoding)))
(req "operations" Operation_list_list_hash.encoding)
(req "fitness" Fitness.encoding))

module Encoding = struct
type t = {

@@ -329,7 +329,7 @@ module Block_header = struct
compare x y >> fun () -> list compare xs ys in
Block_hash.compare b1.shell.predecessor b2.shell.predecessor >> fun () ->
compare b1.proto b2.proto >> fun () ->
list Operation_hash.compare
Operation_list_list_hash.compare
b1.shell.operations b2.shell.operations >> fun () ->
Time.compare b1.shell.timestamp b2.shell.timestamp >> fun () ->
list compare b1.shell.fitness b2.shell.fitness

@@ -349,6 +349,38 @@ module Block_header = struct
(Value)
(Block_hash.Set)

module Operation_list_count =
Store_helpers.Make_single_store
(Indexed_store.Store)
(struct let name = ["operation_list_count"] end)
(Store_helpers.Make_value(struct
type t = int
let encoding = Data_encoding.int8
end))

module Operations_index =
Store_helpers.Make_indexed_substore
(Store_helpers.Make_substore
(Indexed_store.Store)
(struct let name = ["operations"] end))
(Store_helpers.Integer_index)

module Operation_list =
Operations_index.Make_map
(struct let name = ["list"] end)
(Store_helpers.Make_value(struct
type t = Operation_hash.t list
let encoding = Data_encoding.list Operation_hash.encoding
end))

module Operation_list_path =
Operations_index.Make_map
(struct let name = ["path"] end)
(Store_helpers.Make_value(struct
type t = Operation_list_list_hash.path
let encoding = Operation_list_list_hash.path_encoding
end))

end
@@ -187,8 +187,8 @@ module Block_header : sig
net_id: Net_id.t ;
predecessor: Block_hash.t ;
timestamp: Time.t ;
operations: Operation_list_list_hash.t ;
fitness: MBytes.t list ;
operations: Operation_hash.t list ;
}
val shell_header_encoding: shell_header Data_encoding.t

@@ -206,6 +206,20 @@ module Block_header : sig
and type value = t
and type key_set = Block_hash.Set.t

module Operation_list_count : SINGLE_STORE
with type t = store * Block_hash.t
and type value = int

module Operation_list : MAP_STORE
with type t = store * Block_hash.t
and type key = int
and type value = Operation_hash.t list

module Operation_list_path : MAP_STORE
with type t = store * Block_hash.t
and type key = int
and type value = Operation_list_list_hash.path

end
@@ -360,3 +360,12 @@ module Make_buffered_map
(fun k v acc -> let res = store s k v in acc >>= fun () -> res)
map Lwt.return_unit
end

module Integer_index = struct
type t = int
let path_length = 1
let to_path x = [string_of_int x]
let of_path = function
| [x] -> begin try Some (int_of_string x) with _ -> None end
| _ -> None
end
@@ -43,3 +43,5 @@ module Make_buffered_map
module Make_indexed_substore (S : STORE) (I : INDEX)
: INDEXED_STORE with type t = S.t
and type key = I.t

module Integer_index : INDEX with type t = int
@@ -21,13 +21,18 @@ type 'a request_param = {
}

module Make_raw
(Hash : HASH)
(Disk_table : State.DATA_STORE with type key := Hash.t)
(Memory_table : Hashtbl.S with type key := Hash.t)
(Hash : sig type t end)
(Disk_table :
Distributed_db_functors.DISK_TABLE with type key := Hash.t)
(Memory_table :
Distributed_db_functors.MEMORY_TABLE with type key := Hash.t)
(Request_message : sig
type param
val forge : param -> Hash.t list -> Message.t
end) = struct
end)
(Precheck : Distributed_db_functors.PRECHECK
with type key := Hash.t
and type value := Disk_table.value) = struct

type key = Hash.t
type value = Disk_table.value

@@ -45,7 +50,7 @@ module Make_raw
(Hash) (Memory_table) (Request)
module Table =
Distributed_db_functors.Make_table
(Hash) (Disk_table) (Memory_table) (Scheduler)
(Hash) (Disk_table) (Memory_table) (Scheduler) (Precheck)

type t = {
scheduler: Scheduler.t ;

@@ -62,23 +67,71 @@ module Make_raw

end

module No_precheck = struct
type param = unit
let precheck _ _ _ = true
end

module Raw_operation =
Make_raw (Operation_hash) (State.Operation) (Operation_hash.Table) (struct
Make_raw
(Operation_hash)
(State.Operation)
(Operation_hash.Table)
(struct
type param = Net_id.t
let forge net_id keys = Message.Get_operations (net_id, keys)
end)
(No_precheck)

module Raw_block_header =
Make_raw (Block_hash) (State.Block_header) (Block_hash.Table) (struct
Make_raw
(Block_hash)
(State.Block_header)
(Block_hash.Table)
(struct
type param = Net_id.t
let forge net_id keys = Message.Get_block_headers (net_id, keys)
end)
(No_precheck)

module Operation_list_table =
Hashtbl.Make(struct
type t = Block_hash.t * int
let hash = Hashtbl.hash
let equal (b1, i1) (b2, i2) =
Block_hash.equal b1 b2 && i1 = i2
end)

module Raw_operation_list =
Make_raw
(struct type t = Block_hash.t * int end)
(State.Operation_list)
(Operation_list_table)
(struct
type param = Net_id.t
let forge net_id keys =
Message.Get_operation_list (net_id, keys)
end)
(struct
type param = Operation_list_list_hash.t
let precheck (_block, expected_ofs) expected_hash (ops, path) =
let received_hash, received_ofs =
Operation_list_list_hash.check_path path
(Operation_list_hash.compute ops) in
received_ofs = expected_ofs &&
Operation_list_list_hash.compare expected_hash received_hash = 0
end)

module Raw_protocol =
Make_raw (Protocol_hash) (State.Protocol) (Protocol_hash.Table) (struct
Make_raw
(Protocol_hash)
(State.Protocol)
(Protocol_hash.Table)
(struct
type param = unit
let forge () keys = Message.Get_protocols keys
end)
(No_precheck)

type callback = {
notify_branch: P2p.Peer_id.t -> Block_hash.t list -> unit ;

@@ -103,6 +156,7 @@ and net = {
global_db: db ;
operation_db: Raw_operation.t ;
block_header_db: Raw_block_header.t ;
operation_list_db: Raw_operation_list.t ;
callback: callback ;
active_peers: P2p.Peer_id.Set.t ref ;
active_connections: p2p_reader P2p.Peer_id.Table.t ;
@@ -266,6 +320,43 @@ module P2p_reader = struct
global_db.protocol_db.table state.gid hash protocol >>= fun () ->
Lwt.return_unit

| Get_operation_list (net_id, hashes) ->
may_handle state net_id @@ fun net_db ->
Lwt_list.iter_p
(fun (block, ofs as key) ->
Raw_operation_list.Table.read
net_db.operation_list_db.table key >>= function
| None -> Lwt.return_unit
| Some (ops, path) ->
ignore @@
P2p.try_send
global_db.p2p state.conn
(Operation_list (net_id, block, ofs, ops, path)) ;
Lwt.return_unit)
hashes

| Operation_list (net_id, block, ofs, ops, path) ->
may_handle state net_id @@ fun net_db ->
(* TODO early detection of non-requested list. *)
let found_hash, found_ofs =
Operation_list_list_hash.check_path
path (Operation_list_hash.compute ops) in
if found_ofs <> ofs then
Lwt.return_unit
else
Raw_block_header.Table.read
net_db.block_header_db.table block >>= function
| None -> Lwt.return_unit
| Some bh ->
if Operation_list_list_hash.compare
found_hash bh.shell.operations <> 0 then
Lwt.return_unit
else
Raw_operation_list.Table.notify
net_db.operation_list_db.table state.gid
(block, ofs) (ops, path) >>= fun () ->
Lwt.return_unit

let rec worker_loop global_db state =
Lwt_utils.protect ~canceler:state.canceler begin fun () ->
P2p.recv global_db.p2p state.conn

@@ -353,8 +444,10 @@ let activate ~callback ({ p2p ; active_nets } as global_db) net =
let block_header_db =
Raw_block_header.create
~global_input:global_db.block_input p2p_request net in
let operation_list_db =
Raw_operation_list.create p2p_request net in
let net = {
global_db ; operation_db ; block_header_db ;
global_db ; operation_db ; block_header_db ; operation_list_db ;
net ; callback ; active_peers ;
active_connections = P2p.Peer_id.Table.create 53 ;
} in

@@ -403,10 +496,13 @@ let shutdown { p2p ; p2p_readers ; active_nets } =
P2p.shutdown p2p >>= fun () ->
Lwt.return_unit

module type DISTRIBUTED_DB = Distributed_db_functors.DISTRIBUTED_DB
module type PARAMETRIZED_DISTRIBUTED_DB =
Distributed_db_functors.PARAMETRIZED_DISTRIBUTED_DB
module type DISTRIBUTED_DB =
Distributed_db_functors.DISTRIBUTED_DB

module Make
(Table : DISTRIBUTED_DB)
(Table : PARAMETRIZED_DISTRIBUTED_DB with type param := unit)
(Kind : sig
type t
val proj: t -> Table.t

@@ -417,8 +513,8 @@ module Make
let known t k = Table.known (Kind.proj t) k
let read t k = Table.read (Kind.proj t) k
let read_exn t k = Table.read_exn (Kind.proj t) k
let prefetch t ?peer k = Table.prefetch (Kind.proj t) ?peer k
let fetch t ?peer k = Table.fetch (Kind.proj t) ?peer k
let prefetch t ?peer k = Table.prefetch (Kind.proj t) ?peer k ()
let fetch t ?peer k = Table.fetch (Kind.proj t) ?peer k ()
let commit t k = Table.commit (Kind.proj t) k
let inject t k v = Table.inject (Kind.proj t) k v
let watch t = Table.watch (Kind.proj t)
@@ -442,7 +538,73 @@ module Protocol =
let proj db = db.protocol_db.table
end)

let inject_block t bytes =
module Operation_list = struct

type t = net
type key = Block_hash.t * int
type value = Operation_hash.t list
type param = Operation_list_list_hash.t

let proj net = net.operation_list_db.table

module Table = Raw_operation_list.Table

let known t k = Table.known (proj t) k
let read t k =
Table.read (proj t) k >>= function
| None -> Lwt.return_none
| Some (op, _) -> Lwt.return (Some op)
let read_exn t k = Table.read_exn (proj t) k >|= fst
let prefetch t ?peer k p = Table.prefetch (proj t) ?peer k p
let fetch t ?peer k p = Table.fetch (proj t) ?peer k p >|= fst

let rec do_read net block acc i =
if i <= 0 then
Lwt.return []
else
read_exn net (block, i-1) >>= fun ops ->
do_read net block (ops :: acc) (i-1)

let read_all_opt net block =
State.Operation_list.read_count_opt
net.net block >>= function
| None -> Lwt.return_none
| Some len -> do_read net block [] len >>= fun ops -> Lwt.return (Some ops)

let read_all_exn net block =
State.Operation_list.read_count_exn
net.net block >>= fun len ->
do_read net block [] len

let rec do_commit net block i =
if i <= 0 then
Lwt.return_unit
else
Raw_operation_list.Table.commit
net.operation_list_db.table (block, i-1) >>= fun () ->
do_commit net block (i-1)

let commit_all net block len =
State.Operation_list.store_count net.net block len >>= fun () ->
do_commit net block len

let inject_all net block opss =
State.Operation_list.read_count_opt net.net block >>= function
| Some _ -> Lwt.return_false
| None ->
let hashes = List.map Operation_list_hash.compute opss in
Lwt_list.mapi_p
(fun i ops ->
let path = Operation_list_list_hash.compute_path hashes i in
Raw_operation_list.Table.inject
net.operation_list_db.table
(block, i) (ops, path))
opss >>= fun injected ->
Lwt.return (List.for_all (fun x -> x) injected)

end

let inject_block t bytes operations =
let hash = Block_hash.hash_bytes [bytes] in
match
Data_encoding.Binary.of_bytes Store.Block_header.encoding bytes

@@ -458,13 +620,45 @@ let inject_block t bytes =
| true ->
failwith "Previously injected block."
| false ->
let computed_hash =
Operation_list_list_hash.compute
(List.map Operation_list_hash.compute operations) in
fail_unless
(Operation_list_list_hash.compare
computed_hash block.shell.operations = 0)
(Exn (Failure "Incoherent operation list")) >>=? fun () ->
Raw_block_header.Table.inject
net_db.block_header_db.table hash block >>= function
| false ->
failwith "Previously injected block."
| true ->
Operation_list.inject_all
net_db hash operations >>= fun _ ->
return (hash, block)

(*
let inject_operation t bytes =
let hash = Operation_hash.hash_bytes [bytes] in
match Data_encoding.Binary.of_bytes Store.Operation.encoding bytes with
| None ->
failwith "Cannot parse operations."
| Some op ->
match get_net t op.shell.net_id with
| None ->
failwith "Unknown network."
| Some net_db ->
Operation.known net_db hash >>= function
| true ->
failwith "Previously injected block."
| false ->
Raw_operation.Table.inject
net_db.operation_db.table hash op >>= function
| false ->
failwith "Previously injected block."
| true ->
return (hash, op)
*)

let broadcast_head net head mempool =
let msg : Message.t =
Current_head (State.Net.id net.net, head, mempool) in
@@ -40,11 +40,11 @@ module type DISTRIBUTED_DB = sig
val known: t -> key -> bool Lwt.t
val read: t -> key -> value option Lwt.t
val read_exn: t -> key -> value Lwt.t
val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> unit
val fetch: t -> ?peer:P2p.Peer_id.t -> key -> value Lwt.t
val commit: t -> key -> unit Lwt.t
val inject: t -> key -> value -> bool Lwt.t
val watch: t -> (key * value) Lwt_stream.t * Watcher.stopper
val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> unit
val fetch: t -> ?peer:P2p.Peer_id.t -> key -> value Lwt.t
end

module Operation :

@@ -62,11 +62,41 @@ module Protocol :
and type key := Protocol_hash.t
and type value := Tezos_compiler.Protocol.t

module Operation_list : sig

type t = net
type key = Block_hash.t * int
type value = Operation_hash.t list
type param = Operation_list_list_hash.t

val known: t -> key -> bool Lwt.t
val read: t -> key -> value option Lwt.t
val read_exn: t -> key -> value Lwt.t
val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> param -> unit
val fetch: t -> ?peer:P2p.Peer_id.t -> key -> param -> value Lwt.t

val read_all_opt:
net -> Block_hash.t -> Operation_hash.t list list option Lwt.t
val read_all_exn:
net -> Block_hash.t -> Operation_hash.t list list Lwt.t

val commit_all:
net -> Block_hash.t -> int -> unit Lwt.t
val inject_all:
net -> Block_hash.t -> Operation_hash.t list list -> bool Lwt.t

end

val broadcast_head:
net -> Block_hash.t -> Operation_hash.t list -> unit

val inject_block:
t -> MBytes.t -> (Block_hash.t * Store.Block_header.t) tzresult Lwt.t
t -> MBytes.t -> Operation_hash.t list list ->
(Block_hash.t * Store.Block_header.t) tzresult Lwt.t

(* val inject_operation: *)
(* t -> MBytes.t -> *)
(* (Block_hash.t * Store.Operation.t) tzresult Lwt.t *)

val read_block:
t -> Block_hash.t -> (net * Store.Block_header.t) option Lwt.t
@@ -7,19 +7,63 @@
(* *)
(**************************************************************************)

module type DISTRIBUTED_DB = sig
module type PARAMETRIZED_RO_DISTRIBUTED_DB = sig

type t
type key
type value
type param

val known: t -> key -> bool Lwt.t
val read: t -> key -> value option Lwt.t
val read_exn: t -> key -> value Lwt.t
val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> unit
val fetch: t -> ?peer:P2p.Peer_id.t -> key -> value Lwt.t

val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> param -> unit
val fetch: t -> ?peer:P2p.Peer_id.t -> key -> param -> value Lwt.t

end

module type PARAMETRIZED_DISTRIBUTED_DB = sig

include PARAMETRIZED_RO_DISTRIBUTED_DB

val commit: t -> key -> unit Lwt.t
(* val commit_invalid: t -> key -> unit Lwt.t *) (* TODO *)
val inject: t -> key -> value -> bool Lwt.t
val watch: t -> (key * value) Lwt_stream.t * Watcher.stopper

end

module type DISTRIBUTED_DB = sig

include PARAMETRIZED_DISTRIBUTED_DB with type param := unit

val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> unit
val fetch: t -> ?peer:P2p.Peer_id.t -> key -> value Lwt.t

end

module type DISK_TABLE = sig
type store
type key
type value
val known: store -> key -> bool Lwt.t
val read: store -> key -> value tzresult Lwt.t
val read_opt: store -> key -> value option Lwt.t
val read_exn: store -> key -> value Lwt.t
val store: store -> key -> value -> bool Lwt.t
val remove: store -> key -> bool Lwt.t
end

module type MEMORY_TABLE = sig
type 'a t
type key
val create: int -> 'a t
val find: 'a t -> key -> 'a
val add: 'a t -> key -> 'a -> unit
val replace: 'a t -> key -> 'a -> unit
val remove: 'a t -> key -> unit
val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
end

module type SCHEDULER_EVENTS = sig
@@ -29,16 +73,27 @@ module type SCHEDULER_EVENTS = sig
val notify: t -> P2p.Peer_id.t -> key -> unit
val notify_unrequested: t -> P2p.Peer_id.t -> key -> unit
val notify_duplicate: t -> P2p.Peer_id.t -> key -> unit
val notify_invalid: t -> P2p.Peer_id.t -> key -> unit
end

module type PRECHECK = sig
type key
type param
type value
val precheck: key -> param -> value -> bool
end

module Make_table
(Hash : HASH)
(Disk_table : State.DATA_STORE with type key := Hash.t)
(Memory_table : Hashtbl.S with type key := Hash.t)
(Scheduler : SCHEDULER_EVENTS with type key := Hash.t) : sig
(Hash : sig type t end)
(Disk_table : DISK_TABLE with type key := Hash.t)
(Memory_table : MEMORY_TABLE with type key := Hash.t)
(Scheduler : SCHEDULER_EVENTS with type key := Hash.t)
(Precheck : PRECHECK with type key := Hash.t
and type value := Disk_table.value) : sig

include DISTRIBUTED_DB with type key = Hash.t
include PARAMETRIZED_DISTRIBUTED_DB with type key = Hash.t
and type value = Disk_table.value
and type param = Precheck.param
val create:
?global_input:(key * value) Watcher.input ->
Scheduler.t -> Disk_table.store -> t

@@ -48,6 +103,7 @@ end = struct

type key = Hash.t
type value = Disk_table.value
type param = Precheck.param

type t = {
scheduler: Scheduler.t ;

@@ -58,7 +114,7 @@ end = struct
}

and status =
| Pending of value Lwt.u
| Pending of value Lwt.u * param
| Found of value

let known s k =

@@ -79,24 +135,23 @@ end = struct
| Found v -> Lwt.return v
| Pending _ -> Lwt.fail Not_found

let fetch s ?peer k =
let fetch s ?peer k param =
match Memory_table.find s.memory k with
| exception Not_found -> begin
Disk_table.read_opt s.disk k >>= function
| None ->
let waiter, wakener = Lwt.wait () in
Memory_table.add s.memory k (Pending wakener) ;
Memory_table.add s.memory k (Pending (wakener, param)) ;
Scheduler.request s.scheduler peer k ;
waiter
| Some v -> Lwt.return v
end
| Pending w -> Lwt.waiter_of_wakener w
| Pending (w, _) -> Lwt.waiter_of_wakener w
| Found v -> Lwt.return v

let prefetch s ?peer k = Lwt.ignore_result (fetch s ?peer k)
let prefetch s ?peer k param = Lwt.ignore_result (fetch s ?peer k param)

let notify s p k v =
Scheduler.notify s.scheduler p k ;
match Memory_table.find s.memory k with
| exception Not_found -> begin
Disk_table.known s.disk k >>= function

@@ -107,13 +162,19 @@ end = struct
Scheduler.notify_unrequested s.scheduler p k ;
Lwt.return_unit
end
| Pending w ->
| Pending (w, param) ->
if not (Precheck.precheck k param v) then begin
Scheduler.notify_invalid s.scheduler p k ;
Lwt.return_unit
end else begin
Scheduler.notify s.scheduler p k ;
Memory_table.replace s.memory k (Found v) ;
Lwt.wakeup w v ;
iter_option s.global_input
~f:(fun input -> Watcher.notify input (k, v)) ;
Watcher.notify s.input (k, v) ;
Lwt.return_unit
end
| Found _ ->
Scheduler.notify_duplicate s.scheduler p k ;
Lwt.return_unit

@@ -137,7 +198,7 @@ end = struct
| exception Not_found -> Lwt.return_unit
| Pending _ -> assert false
| Found v ->
Disk_table.store s.disk v >>= fun _ ->
Disk_table.store s.disk k v >>= fun _ ->
Memory_table.remove s.memory k ;
Lwt.return_unit
@@ -158,8 +219,8 @@ module type REQUEST = sig
end

module Make_request_scheduler
(Hash : HASH)
(Table : Hashtbl.S with type key := Hash.t)
(Hash : sig type t end)
(Table : MEMORY_TABLE with type key := Hash.t)
(Request : REQUEST with type key := Hash.t) : sig

type t

@@ -181,6 +242,7 @@ end = struct
and event =
| Request of P2p.Peer_id.t option * key
| Notify of P2p.Peer_id.t * key
| Notify_invalid of P2p.Peer_id.t * key
| Notify_duplicate of P2p.Peer_id.t * key
| Notify_unrequested of P2p.Peer_id.t * key

@@ -188,6 +250,8 @@ end = struct
t.push_to_worker (Request (p, k))
let notify t p k =
t.push_to_worker (Notify (p, k))
let notify_invalid t p k =
t.push_to_worker (Notify_invalid (p, k))
let notify_duplicate t p k =
t.push_to_worker (Notify_duplicate (p, k))
let notify_unrequested t p k =

@@ -240,6 +304,7 @@ end = struct
| Notify (_gid, key) ->
Table.remove state.pending key ;
Lwt.return_unit
| Notify_invalid _
| Notify_unrequested _
| Notify_duplicate _ ->
(* TODO *)
@@ -7,19 +7,65 @@
(* *)
(**************************************************************************)

module type DISTRIBUTED_DB = sig
module type PARAMETRIZED_RO_DISTRIBUTED_DB = sig

type t
type key
type value
type param

val known: t -> key -> bool Lwt.t
val read: t -> key -> value option Lwt.t
val read_exn: t -> key -> value Lwt.t
val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> unit
val fetch: t -> ?peer:P2p.Peer_id.t -> key -> value Lwt.t

val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> param -> unit
val fetch: t -> ?peer:P2p.Peer_id.t -> key -> param -> value Lwt.t

end

module type PARAMETRIZED_DISTRIBUTED_DB = sig

include PARAMETRIZED_RO_DISTRIBUTED_DB

val commit: t -> key -> unit Lwt.t
(* val commit_invalid: t -> key -> unit Lwt.t *) (* TODO *)
val inject: t -> key -> value -> bool Lwt.t
val watch: t -> (key * value) Lwt_stream.t * Watcher.stopper

end

module type DISTRIBUTED_DB = sig

include PARAMETRIZED_DISTRIBUTED_DB with type param := unit

val prefetch: t -> ?peer:P2p.Peer_id.t -> key -> unit
val fetch: t -> ?peer:P2p.Peer_id.t -> key -> value Lwt.t

end

module type DISK_TABLE = sig
(* A subtype of State.DATA_STORE *)
type store
type key
type value
val known: store -> key -> bool Lwt.t
val read: store -> key -> value tzresult Lwt.t
val read_opt: store -> key -> value option Lwt.t
val read_exn: store -> key -> value Lwt.t
val store: store -> key -> value -> bool Lwt.t
val remove: store -> key -> bool Lwt.t
end

module type MEMORY_TABLE = sig
(* A subtype of Hashtbl.S *)
type 'a t
type key
val create: int -> 'a t
val find: 'a t -> key -> 'a
val add: 'a t -> key -> 'a -> unit
val replace: 'a t -> key -> 'a -> unit
val remove: 'a t -> key -> unit
val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
end

module type SCHEDULER_EVENTS = sig
@@ -29,16 +75,27 @@ module type SCHEDULER_EVENTS = sig
val notify: t -> P2p.Peer_id.t -> key -> unit
val notify_unrequested: t -> P2p.Peer_id.t -> key -> unit
val notify_duplicate: t -> P2p.Peer_id.t -> key -> unit
val notify_invalid: t -> P2p.Peer_id.t -> key -> unit
end

module type PRECHECK = sig
type key
type param
type value
val precheck: key -> param -> value -> bool
end

module Make_table
(Hash : HASH)
(Disk_table : State.DATA_STORE with type key := Hash.t)
(Memory_table : Hashtbl.S with type key := Hash.t)
(Scheduler : SCHEDULER_EVENTS with type key := Hash.t) : sig
(Hash : sig type t end)
(Disk_table : DISK_TABLE with type key := Hash.t)
(Memory_table : MEMORY_TABLE with type key := Hash.t)
(Scheduler : SCHEDULER_EVENTS with type key := Hash.t)
(Precheck : PRECHECK with type key := Hash.t
and type value := Disk_table.value) : sig

include DISTRIBUTED_DB with type key = Hash.t
include PARAMETRIZED_DISTRIBUTED_DB with type key = Hash.t
and type value = Disk_table.value
and type param := Precheck.param
val create:
?global_input:(key * value) Watcher.input ->
Scheduler.t -> Disk_table.store -> t

@@ -54,8 +111,8 @@ module type REQUEST = sig
end

module Make_request_scheduler
(Hash : HASH)
(Table : Hashtbl.S with type key := Hash.t)
(Hash : sig type t end)
(Table : MEMORY_TABLE with type key := Hash.t)
(Request : REQUEST with type key := Hash.t) : sig

type t
@@ -27,6 +27,10 @@ type t =
| Get_protocols of Protocol_hash.t list
| Protocol of Tezos_compiler.Protocol.t

| Get_operation_list of Net_id.t * (Block_hash.t * int) list
| Operation_list of Net_id.t * Block_hash.t * int *
Operation_hash.t list * Operation_list_list_hash.path

let encoding =
let open Data_encoding in
let case ?max_length ~tag encoding unwrap wrap =

@@ -34,7 +38,7 @@ let encoding =
[
case ~tag:0x10
(obj1
(req "get_current_branch" Net_id.encoding))
(req "get_current_branch" Store.Net_id.encoding))
(function
| Get_current_branch net_id -> Some net_id
| _ -> None)

@@ -118,6 +122,26 @@ let encoding =
(function Protocol proto -> Some proto | _ -> None)
(fun proto -> Protocol proto);

case ~tag:0x50
(obj2
(req "net_id" Net_id.encoding)
(req "get_operation_list" (list (tup2 Block_hash.encoding int8))))
(function
| Get_operation_list (net_id, keys) -> Some (net_id, keys)
| _ -> None)
(fun (net_id, keys) -> Get_operation_list (net_id, keys));

case ~tag:0x51
(obj4
(req "net_id" Net_id.encoding)
(req "operation_list" (tup2 Block_hash.encoding int8))
(req "operations" (list Operation_hash.encoding))
(req "operation_list_path" Operation_list_list_hash.path_encoding))
(function Operation_list (net_id, block, ofs, ops, path) ->
Some (net_id, (block, ofs), ops, path) | _ -> None)
(fun (net_id, (block, ofs), ops, path) ->
Operation_list (net_id, block, ofs, ops, path)) ;

]

let versions =
@@ -27,6 +27,10 @@ type t =
| Get_protocols of Protocol_hash.t list
| Protocol of Tezos_compiler.Protocol.t

| Get_operation_list of Net_id.t * (Block_hash.t * int) list
| Operation_list of Net_id.t * Block_hash.t * int *
Operation_hash.t list * Operation_list_list_hash.path

val cfg : t P2p.message_config

val pp_json : Format.formatter -> t -> unit
@@ -33,7 +33,7 @@ let inject_protocol state ?force:_ proto =
"Compilation failed (%a)"
Protocol_hash.pp_short hash
| true ->
State.Protocol.store state proto >>= function
State.Protocol.store state hash proto >>= function
| false ->
failwith
"Previously registred protocol (%a)"

@@ -42,8 +42,10 @@ let inject_protocol state ?force:_ proto =
in
Lwt.return (hash, validation)

let inject_block validator ?force bytes =
Validator.inject_block validator ?force bytes >>=? fun (hash, block) ->
let inject_block validator ?force bytes operations =
Validator.inject_block
validator ?force
bytes operations >>=? fun (hash, block) ->
return (hash, (block >>=? fun _ -> return ()))

type t = {

@@ -54,7 +56,8 @@ type t = {
mainnet_net: State.Net.t ;
mainnet_validator: Validator.t ;
inject_block:
?force:bool -> MBytes.t ->
?force:bool ->
MBytes.t -> Operation_hash.t list list ->
(Block_hash.t * unit tzresult Lwt.t) tzresult Lwt.t ;
inject_operation:
?force:bool -> MBytes.t ->

@@ -139,7 +142,8 @@ module RPC = struct
fitness: MBytes.t list ;
timestamp: Time.t ;
protocol: Protocol_hash.t option ;
operations: Operation_hash.t list option ;
operations_hash: Operation_list_list_hash.t ;
operations: Operation_hash.t list list option ;
data: MBytes.t option ;
net: Node_rpc_services.Blocks.net ;
test_protocol: Protocol_hash.t option ;

@@ -152,6 +156,7 @@ module RPC = struct
fitness = block.fitness ;
timestamp = block.timestamp ;
protocol = Some block.protocol_hash ;
operations_hash = block.operations_hash ;
operations = Some block.operations ;
data = Some block.proto_header ;
net = block.net_id ;

@@ -166,7 +171,8 @@ module RPC = struct
fitness = shell.fitness ;
timestamp = shell.timestamp ;
protocol = None ;
operations = Some shell.operations ;
operations_hash = shell.operations ;
operations = None ;
data = Some proto ;
test_protocol = None ;
test_network = None ;

@@ -316,7 +322,7 @@ module RPC = struct
let validator, _net = get_net node block in
let pv = Validator.prevalidator validator in
let { Updater.applied }, _ = Prevalidator.operations pv in
Lwt.return applied
Lwt.return [applied]
| `Hash hash->
read_valid_block node hash >|= function
| None -> []
@@ -26,7 +26,8 @@ module RPC : sig
type block_info = Node_rpc_services.Blocks.block_info

val inject_block:
t -> ?force:bool -> MBytes.t ->
t -> ?force:bool ->
MBytes.t -> Operation_hash.t list list ->
(Block_hash.t * unit tzresult Lwt.t) tzresult Lwt.t
(** [inject_block node ?force bytes] tries to insert [bytes]
(supposedly the serialization of a block header) inside

@@ -58,7 +59,7 @@ module RPC : sig
t -> block -> block_info Lwt.t

val operations:
t -> block -> Operation_hash.t list Lwt.t
t -> block -> Operation_hash.t list list Lwt.t
val operation_content:
t -> Operation_hash.t -> Store.Operation.t option Lwt.t
val operation_watcher:
@@ -307,12 +307,13 @@ let list_operations node {Services.Operations.monitor; contents} =
let include_ops = match contents with None -> false | Some x -> x in
Node.RPC.operations node `Prevalidation >>= fun operations ->
Lwt_list.map_p
(Lwt_list.map_p
(fun hash ->
if include_ops then
Node.RPC.operation_content node hash >>= fun op ->
Lwt.return (hash, op)
else
Lwt.return (hash, None))
Lwt.return (hash, None)))
operations >>= fun operations ->
if not monitor then
RPC.Answer.return operations

@@ -324,8 +325,8 @@ let list_operations node {Services.Operations.monitor; contents} =
if not !first_request then
Lwt_stream.get stream >>= function
| None -> Lwt.return_none
| Some (h, op) when include_ops -> Lwt.return (Some [h, Some op])
| Some (h, _) -> Lwt.return (Some [h, None])
| Some (h, op) when include_ops -> Lwt.return (Some [[h, Some op]])
| Some (h, _) -> Lwt.return (Some [[h, None]])
else begin
first_request := false ;
Lwt.return (Some operations)

@@ -416,9 +417,12 @@ let build_rpc_directory node =
RPC.Answer.return res in
RPC.register0 dir Services.validate_block implementation in
let dir =
let implementation (block, blocking, force) =
let implementation
{ Node_rpc_services.raw ; blocking ; force ; operations } =
begin
Node.RPC.inject_block node ?force block >>=? fun (hash, wait) ->
Node.RPC.inject_block
node ~force
raw operations >>=? fun (hash, wait) ->
(if blocking then wait else return ()) >>=? fun () -> return hash
end >>= RPC.Answer.return in
RPC.register0 dir Services.inject_block implementation in
@@ -66,7 +66,8 @@ module Blocks = struct
fitness: MBytes.t list ;
timestamp: Time.t ;
protocol: Protocol_hash.t option ;
operations: Operation_hash.t list option ;
operations_hash: Operation_list_list_hash.t ;
operations: Operation_hash.t list list option ;
data: MBytes.t option ;
net: net ;
test_protocol: Protocol_hash.t option ;

@@ -75,25 +76,32 @@ module Blocks = struct

let block_info_encoding =
conv
(fun { hash ; predecessor ; fitness ; timestamp ; protocol ; operations ;
net ; test_protocol ; test_network ; data } ->
(hash, predecessor, fitness, timestamp, protocol, operations,
net, test_protocol, test_network, data))
(fun (hash, predecessor, fitness, timestamp, protocol, operations,
net, test_protocol, test_network, data) ->
{ hash ; predecessor ; fitness ; timestamp ; protocol ; operations ;
net ; test_protocol ; test_network ; data })
(obj10
(fun { hash ; predecessor ; fitness ; timestamp ; protocol ;
operations_hash ; operations ; data ; net ;
test_protocol ; test_network } ->
((hash, predecessor, fitness, timestamp, protocol),
(operations_hash, operations, data,
net, test_protocol, test_network)))
(fun ((hash, predecessor, fitness, timestamp, protocol),
(operations_hash, operations, data,
net, test_protocol, test_network)) ->
{ hash ; predecessor ; fitness ; timestamp ; protocol ;
operations_hash ; operations ; data ; net ;
test_protocol ; test_network })
(merge_objs
(obj5
(req "hash" Block_hash.encoding)
(req "predecessor" Block_hash.encoding)
(req "fitness" Fitness.encoding)
(req "timestamp" Time.encoding)
(opt "protocol" Protocol_hash.encoding)
(opt "operations" (list Operation_hash.encoding))
(req "net_id" net_encoding)
(opt "protocol" Protocol_hash.encoding))
(obj6
(req "operations_hash" Operation_list_list_hash.encoding)
(opt "operations" (list (list Operation_hash.encoding)))
(opt "data" bytes)
(req "net" net_encoding)
(opt "test_protocol" Protocol_hash.encoding)
(opt "test_network" (tup2 net_encoding Time.encoding))
(opt "data" bytes))
(opt "test_network" (tup2 net_encoding Time.encoding))))

let parse_block s =
try

@@ -231,7 +239,7 @@ module Blocks = struct
RPC.service
~description:"List the block operations."
~input: empty
~output: (obj1 (req "operations" (list Operation_hash.encoding)))
~output: (obj1 (req "operations" (list (list Operation_hash.encoding))))
RPC.Path.(block_path / "operations")

let protocol =
@ -436,12 +444,13 @@ module Operations = struct
|
||||
~output:
|
||||
(obj1
|
||||
(req "operations"
|
||||
(list
|
||||
(list
|
||||
(obj2
|
||||
(req "hash" Operation_hash.encoding)
|
||||
(opt "contents"
|
||||
(dynamic_size Updater.raw_operation_encoding)))
|
||||
)))
|
||||
))))
|
||||
RPC.Path.(root / "operations")
|
||||
|
||||
end
|
||||
@ -637,7 +646,7 @@ let forge_block =
|
||||
(opt "predecessor" Block_hash.encoding)
|
||||
(opt "timestamp" Time.encoding)
|
||||
(req "fitness" Fitness.encoding)
|
||||
(req "operations" (list Operation_hash.encoding))
|
||||
(req "operations" Operation_list_list_hash.encoding)
|
||||
(req "header" bytes))
|
||||
~output: (obj1 (req "block" bytes))
|
||||
RPC.Path.(root / "forge_block")
|
||||
@ -654,35 +663,50 @@ let validate_block =
|
||||
(Error.wrap @@ empty)
|
||||
RPC.Path.(root / "validate_block")
|
||||
|
||||
let inject_block =
|
||||
RPC.service
|
||||
~description:
|
||||
"Inject a block in the node and broadcast it. The `operations` \
|
||||
embedded in `blockHeader` might pre-validated using a \
|
||||
contextual RPCs from the latest block \
|
||||
(e.g. '/blocks/head/context/preapply'). Returns the ID of the \
|
||||
block. By default, the RPC will wait for the block to be \
|
||||
validated before answering."
|
||||
~input:
|
||||
(conv
|
||||
(fun (block, blocking, force) ->
|
||||
(block, Some blocking, force))
|
||||
(fun (block, blocking, force) ->
|
||||
(block, Utils.unopt ~default:true blocking, force))
|
||||
(obj3
|
||||
type inject_block_param = {
|
||||
raw: MBytes.t ;
|
||||
blocking: bool ;
|
||||
force: bool ;
|
||||
operations: Operation_hash.t list list ;
|
||||
}
|
||||
|
||||
let inject_block_param =
|
||||
conv
|
||||
(fun { raw ; blocking ; force ; operations } ->
|
||||
(raw, blocking, force, operations))
|
||||
(fun (raw, blocking, force, operations) ->
|
||||
{ raw ; blocking ; force ; operations })
|
||||
(obj4
|
||||
(req "data" bytes)
|
||||
(opt "blocking"
|
||||
(dft "blocking"
|
||||
(describe
|
||||
~description:
|
||||
"Should the RPC wait for the block to be \
|
||||
validated before answering. (default: true)"
|
||||
bool))
|
||||
(opt "force"
|
||||
bool)
|
||||
true)
|
||||
(dft "force"
|
||||
(describe
|
||||
~description:
|
||||
"Should we inject the block when its fitness is below \
|
||||
the current head. (default: false)"
|
||||
bool))))
|
||||
bool)
|
||||
false)
|
||||
(req "operations"
|
||||
(describe
|
||||
~description:"..."
|
||||
(list (list Operation_hash.encoding)))))
|
||||
|
||||
let inject_block =
|
||||
RPC.service
|
||||
~description:
|
||||
"Inject a block in the node and broadcast it. The `operations` \
|
||||
embedded in `blockHeader` might be pre-validated using a \
|
||||
contextual RPC from the latest block \
|
||||
(e.g. '/blocks/head/context/preapply'). Returns the ID of the \
|
||||
block. By default, the RPC will wait for the block to be \
|
||||
validated before answering."
|
||||
~input: inject_block_param
|
||||
~output:
|
||||
(Error.wrap @@
|
||||
(obj1 (req "block_hash" Block_hash.encoding)))
|
||||
|
@ -34,7 +34,8 @@ module Blocks : sig
|
||||
fitness: MBytes.t list ;
|
||||
timestamp: Time.t ;
|
||||
protocol: Protocol_hash.t option ;
|
||||
operations: Operation_hash.t list option ;
|
||||
operations_hash: Operation_list_list_hash.t ;
|
||||
operations: Operation_hash.t list list option ;
|
||||
data: MBytes.t option ;
|
||||
net: net ;
|
||||
test_protocol: Protocol_hash.t option ;
|
||||
@ -56,7 +57,7 @@ module Blocks : sig
|
||||
val fitness:
|
||||
(unit, unit * block, unit, MBytes.t list) RPC.service
|
||||
val operations:
|
||||
(unit, unit * block, unit, Operation_hash.t list) RPC.service
|
||||
(unit, unit * block, unit, Operation_hash.t list list) RPC.service
|
||||
val protocol:
|
||||
(unit, unit * block, unit, Protocol_hash.t) RPC.service
|
||||
val test_protocol:
|
||||
@ -108,7 +109,7 @@ module Operations : sig
|
||||
}
|
||||
val list:
|
||||
(unit, unit,
|
||||
list_param, (Operation_hash.t * Store.Operation.t option) list) RPC.service
|
||||
list_param, (Operation_hash.t * Store.Operation.t option) list list) RPC.service
|
||||
end
|
||||
|
||||
module Protocols : sig
|
||||
@ -170,16 +171,21 @@ end
|
||||
val forge_block:
|
||||
(unit, unit,
|
||||
Updater.Net_id.t option * Block_hash.t option * Time.t option *
|
||||
Fitness.fitness * Operation_hash.t list * MBytes.t,
|
||||
Fitness.fitness * Operation_list_list_hash.t * MBytes.t,
|
||||
MBytes.t) RPC.service
|
||||
|
||||
val validate_block:
|
||||
(unit, unit, Blocks.net * Block_hash.t, unit tzresult) RPC.service
|
||||
|
||||
type inject_block_param = {
|
||||
raw: MBytes.t ;
|
||||
blocking: bool ;
|
||||
force: bool ;
|
||||
operations: Operation_hash.t list list ;
|
||||
}
|
||||
|
||||
val inject_block:
|
||||
(unit, unit,
|
||||
(MBytes.t * bool * bool option),
|
||||
Block_hash.t tzresult) RPC.service
|
||||
(unit, unit, inject_block_param, Block_hash.t tzresult) RPC.service
|
||||
|
||||
val inject_operation:
|
||||
(unit, unit,
|
||||
|
@ -48,22 +48,26 @@ let list_pendings net_db ~from_block ~to_block old_mempool =
|
||||
Lwt.return mempool
|
||||
else
|
||||
Distributed_db.Block_header.read_exn net_db hash >>= fun { shell } ->
|
||||
Distributed_db.Operation_list.read_all_exn
|
||||
net_db hash >>= fun operations ->
|
||||
let mempool =
|
||||
List.fold_left
|
||||
(fun mempool h -> Operation_hash.Set.add h mempool)
|
||||
mempool shell.operations in
|
||||
(List.fold_left (fun mempool h -> Operation_hash.Set.add h mempool))
|
||||
mempool operations in
|
||||
pop_blocks ancestor shell.predecessor mempool
|
||||
in
|
||||
let push_block mempool (_hash, shell) =
|
||||
let push_block mempool (hash, _shell) =
|
||||
Distributed_db.Operation_list.read_all_exn
|
||||
net_db hash >|= fun operations ->
|
||||
List.fold_left
|
||||
(fun mempool h -> Operation_hash.Set.remove h mempool)
|
||||
mempool shell.Store.Block_header.operations
|
||||
(List.fold_left (fun mempool h -> Operation_hash.Set.remove h mempool))
|
||||
mempool operations
|
||||
in
|
||||
let net_state = Distributed_db.state net_db in
|
||||
State.Valid_block.Current.new_blocks
|
||||
net_state ~from_block ~to_block >>= fun (ancestor, path) ->
|
||||
pop_blocks ancestor from_block.hash old_mempool >>= fun mempool ->
|
||||
let new_mempool = List.fold_left push_block mempool path in
|
||||
Lwt_list.fold_left_s push_block mempool path >>= fun new_mempool ->
|
||||
Lwt.return new_mempool
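Since the mempool bookkeeping above now folds over a list of operation lists rather than a flat list, here is a self-contained sketch of the double-fold pattern used in `pop_blocks` and `push_block` (plain OCaml, no Tezos dependencies; a string set stands in for `Operation_hash.Set`):
module S = Set.Make (String)

(* Add every element of every sub-list to the set, mirroring
   [List.fold_left (List.fold_left add) set operation_lists]. *)
let add_all (set : S.t) (lists : string list list) : S.t =
  List.fold_left (List.fold_left (fun acc h -> S.add h acc)) set lists

let () =
  let s = add_all S.empty [ ["a"; "b"]; ["c"] ] in
  assert (S.cardinal s = 3)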
|
||||
|
||||
|
||||
|
@ -12,7 +12,12 @@ open Logging.Node.State
|
||||
module Net_id = Store.Net_id
|
||||
|
||||
type error +=
|
||||
| Invalid_fitness of Fitness.fitness * Fitness.fitness
|
||||
| Invalid_fitness of { block: Block_hash.t ;
|
||||
expected: Fitness.fitness ;
|
||||
found: Fitness.fitness }
|
||||
| Invalid_operations of { block: Block_hash.t ;
|
||||
expected: Operation_list_list_hash.t ;
|
||||
found: Operation_hash.t list list }
|
||||
| Unknown_network of Net_id.t
|
||||
| Unknown_operation of Operation_hash.t
|
||||
| Unknown_block of Block_hash.t
|
||||
@ -27,18 +32,22 @@ let () =
|
||||
~title:"Invalid fitness"
|
||||
~description:"The computed fitness differs from the fitness found \
|
||||
\ in the block header."
|
||||
~pp:(fun ppf (expected, found) ->
|
||||
~pp:(fun ppf (block, expected, found) ->
|
||||
Format.fprintf ppf
|
||||
"@[<v 2>Invalid fitness@ \
|
||||
"@[<v 2>Invalid fitness for block %a@ \
|
||||
\ expected %a@ \
|
||||
\ found %a"
|
||||
Block_hash.pp_short block
|
||||
Fitness.pp expected
|
||||
Fitness.pp found)
|
||||
Data_encoding.(obj2
|
||||
Data_encoding.(obj3
|
||||
(req "block" Block_hash.encoding)
|
||||
(req "expected" Fitness.encoding)
|
||||
(req "found" Fitness.encoding))
|
||||
(function Invalid_fitness (e, f) -> Some (e, f) | _ -> None)
|
||||
(fun (e, f) -> Invalid_fitness (e, f)) ;
|
||||
(function Invalid_fitness { block ; expected ; found } ->
|
||||
Some (block, expected, found) | _ -> None)
|
||||
(fun (block, expected, found) ->
|
||||
Invalid_fitness { block ; expected ; found }) ;
|
||||
Error_monad.register_error_kind
|
||||
`Temporary
|
||||
~id:"state.unknown_network"
|
||||
@ -105,7 +114,8 @@ and valid_block = {
|
||||
pred: Block_hash.t ;
|
||||
timestamp: Time.t ;
|
||||
fitness: Protocol.fitness ;
|
||||
operations: Operation_hash.t list ;
|
||||
operations_hash: Operation_list_list_hash.t ;
|
||||
operations: Operation_hash.t list list ;
|
||||
discovery_time: Time.t ;
|
||||
protocol_hash: Protocol_hash.t ;
|
||||
protocol: (module Updater.REGISTRED_PROTOCOL) option ;
|
||||
@ -119,7 +129,8 @@ and valid_block = {
|
||||
}
|
||||
|
||||
let build_valid_block
|
||||
hash header context discovery_time successors invalid_successors =
|
||||
hash header operations
|
||||
context discovery_time successors invalid_successors =
|
||||
Context.get_protocol context >>= fun protocol_hash ->
|
||||
Context.get_test_protocol context >>= fun test_protocol_hash ->
|
||||
Context.get_test_network context >>= fun test_network ->
|
||||
@ -137,7 +148,8 @@ let build_valid_block
|
||||
pred = header.shell.predecessor ;
|
||||
timestamp = header.shell.timestamp ;
|
||||
discovery_time ;
|
||||
operations = header.shell.operations ;
|
||||
operations_hash = header.shell.operations ;
|
||||
operations ;
|
||||
fitness = header.shell.fitness ;
|
||||
protocol_hash ;
|
||||
protocol ;
|
||||
@ -176,7 +188,7 @@ module type DATA_STORE = sig
|
||||
val read_discovery_time_opt: store -> key -> Time.t option Lwt.t
|
||||
val read_discovery_time_exn: store -> key -> Time.t Lwt.t
|
||||
|
||||
val store: store -> value -> bool Lwt.t
|
||||
val store: store -> key -> value -> bool Lwt.t
|
||||
val store_raw: store -> key -> MBytes.t -> value option tzresult Lwt.t
|
||||
val remove: store -> key -> bool Lwt.t
|
||||
|
||||
@ -263,14 +275,12 @@ end = struct
|
||||
S.Contents.read_opt (s, k) >>= function
|
||||
| None -> Lwt.return_none
|
||||
| Some v -> Lwt.return (Some { Time.data = Ok v ; time })
|
||||
let store s v =
|
||||
let bytes = Data_encoding.Binary.to_bytes S.encoding v in
|
||||
let k = S.hash_raw bytes in
|
||||
let store s k v =
|
||||
S.Discovery_time.known s k >>= function
|
||||
| true -> Lwt.return_false
|
||||
| false ->
|
||||
let time = Time.now () in
|
||||
S.RawContents.store (s, k) bytes >>= fun () ->
|
||||
S.Contents.store (s, k) v >>= fun () ->
|
||||
S.Discovery_time.store s k time >>= fun () ->
|
||||
S.Pending.store s k >>= fun () ->
|
||||
Lwt.return_true
|
||||
@ -366,7 +376,7 @@ end = struct
|
||||
let read_discovery_time = atomic2 Locked.read_discovery_time
|
||||
let read_discovery_time_opt = atomic2 Locked.read_discovery_time_opt
|
||||
let read_discovery_time_exn = atomic2 Locked.read_discovery_time_exn
|
||||
let store = atomic2 Locked.store
|
||||
let store = atomic3 Locked.store
|
||||
let store_raw = atomic3 Locked.store_raw
|
||||
let remove = atomic2 Locked.remove
|
||||
let mark_valid = atomic2 Locked.mark_valid
|
||||
@ -391,6 +401,121 @@ module Raw_operation =
|
||||
end)
|
||||
(Operation_hash.Set)
|
||||
|
||||
module Raw_operation_list = struct
|
||||
|
||||
module Locked = struct
|
||||
|
||||
let known store (hash, ofs) =
|
||||
Store.Block_header.Operation_list.known (store, hash) ofs
|
||||
let read store (hash, ofs) =
|
||||
Store.Block_header.Operation_list.read
|
||||
(store, hash) ofs >>=? fun ops ->
|
||||
Store.Block_header.Operation_list_path.read
|
||||
(store, hash) ofs >>=? fun path ->
|
||||
return (ops, path)
|
||||
let read_opt store (hash, ofs) =
|
||||
Store.Block_header.Operation_list.read_opt
|
||||
(store, hash) ofs >>= function
|
||||
| None -> Lwt.return_none
|
||||
| Some ops ->
|
||||
Store.Block_header.Operation_list_path.read_exn
|
||||
(store, hash) ofs >>= fun path ->
|
||||
Lwt.return (Some (ops, path))
|
||||
let read_exn store (hash, ofs) =
|
||||
read_opt store (hash, ofs) >>= function
|
||||
| None -> Lwt.fail Not_found
|
||||
| Some (ops, path) -> Lwt.return (ops, path)
|
||||
let store store (hash, ofs) (ops, path) =
|
||||
Store.Block_header.Operation_list.known
|
||||
(store, hash) ofs >>= function
|
||||
| false ->
|
||||
Store.Block_header.Operation_list.store
|
||||
(store, hash) ofs ops >>= fun () ->
|
||||
Store.Block_header.Operation_list_path.store
|
||||
(store, hash) ofs path >>= fun () ->
|
||||
Lwt.return_true
|
||||
| true ->
|
||||
Lwt.return_false
|
||||
|
||||
let remove store (hash, ofs) =
|
||||
Store.Block_header.Operation_list.known
|
||||
(store, hash) ofs >>= function
|
||||
| false ->
|
||||
Lwt.return_false
|
||||
| true ->
|
||||
Store.Block_header.Operation_list.remove
|
||||
(store, hash) ofs >>= fun () ->
|
||||
Store.Block_header.Operation_list_path.remove
|
||||
(store, hash) ofs >>= fun () ->
|
||||
Lwt.return_true
|
||||
|
||||
let read_count store hash =
|
||||
Store.Block_header.Operation_list_count.read (store, hash)
|
||||
|
||||
let read_count_opt store hash =
|
||||
read_count store hash >>= function
|
||||
| Ok cpt -> Lwt.return (Some cpt)
|
||||
| Error _ -> Lwt.return_none
|
||||
|
||||
let read_count_exn store hash =
|
||||
read_count store hash >>= function
|
||||
| Ok cpt -> Lwt.return cpt
|
||||
| Error _ -> Lwt.fail Not_found
|
||||
|
||||
let store_count store hash count =
|
||||
Store.Block_header.Operation_list_count.store (store, hash) count
|
||||
|
||||
let read_all store hash =
|
||||
Store.Block_header.Operation_list_count.read (store, hash)
|
||||
>>=? fun operation_list_count ->
|
||||
let rec read acc i =
|
||||
if i <= 0 then return acc
|
||||
else
|
||||
Store.Block_header.Operation_list.read
|
||||
(store, hash) (i-1) >>=? fun ops ->
|
||||
read (ops :: acc) (i-1) in
|
||||
read [] operation_list_count
|
||||
|
||||
let read_all_exn store hash =
|
||||
read_all store hash >>= function
|
||||
| Error _ -> Lwt.fail Not_found
|
||||
| Ok ops -> Lwt.return ops
|
||||
|
||||
let store_all store hash op_hashes operations =
|
||||
Store.Block_header.Operation_list_count.store (store, hash)
|
||||
(List.length operations) >>= fun () ->
|
||||
Lwt_list.iteri_p
|
||||
(fun i ops ->
|
||||
Store.Block_header.Operation_list.store
|
||||
(store, hash) i ops >>= fun () ->
|
||||
Store.Block_header.Operation_list_path.store
|
||||
(store, hash) i
|
||||
(Operation_list_list_hash.compute_path op_hashes i)
|
||||
>>= fun () ->
|
||||
Lwt.return_unit)
|
||||
operations >>= fun () ->
|
||||
Lwt.return_unit
|
||||
|
||||
end
|
||||
|
||||
let atomic1 f s = Shared.use s f
|
||||
let atomic2 f s k = Shared.use s (fun s -> f s k)
|
||||
let atomic3 f s k v = Shared.use s (fun s -> f s k v)
|
||||
let atomic4 f s k v1 v2 = Shared.use s (fun s -> f s k v1 v2)
|
||||
|
||||
let known = atomic2 Locked.known
|
||||
let read = atomic2 Locked.read
|
||||
let read_opt = atomic2 Locked.read_opt
|
||||
let read_exn = atomic2 Locked.read_exn
|
||||
let store = atomic3 Locked.store
|
||||
let remove = atomic2 Locked.remove
|
||||
|
||||
let store_all = atomic4 Locked.store_all
|
||||
let read_all = atomic2 Locked.read_all
|
||||
let read_all_exn = atomic2 Locked.read_all_exn
|
||||
|
||||
end
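To spell out the invariant behind `store_all`: the block header commits to one `Operation_list_hash` per operation list, the root of those hashes is the header's `Operation_list_list_hash`, and `compute_path` at index `i` is the proof stored next to list `i`. A hedged sketch of that relationship (assumes the hash library is linked; `op_lists` is a placeholder):
(* op_lists : one operation-hash list per validation pass. *)
let commitment_and_proofs (op_lists : Operation_hash.t list list) =
  let hashes = List.map Operation_list_hash.compute op_lists in
  (* Expected value of [shell.operations] in the block header. *)
  let root = Operation_list_list_hash.compute hashes in
  (* Per-index proofs, as stored by [store_all] next to each sub-list. *)
  let proofs =
    List.mapi
      (fun i _ -> Operation_list_list_hash.compute_path hashes i)
      hashes in
  root, proofs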
|
||||
|
||||
module Raw_block_header = struct
|
||||
|
||||
include
|
||||
@ -419,13 +544,14 @@ module Raw_block_header = struct
|
||||
predecessor = genesis.block ;
|
||||
timestamp = genesis.time ;
|
||||
fitness = [] ;
|
||||
operations = [] ;
|
||||
operations = Operation_list_list_hash.empty ;
|
||||
} in
|
||||
let header =
|
||||
{ Store.Block_header.shell ; proto = MBytes.create 0 } in
|
||||
let bytes =
|
||||
Data_encoding.Binary.to_bytes Store.Block_header.encoding header in
|
||||
Locked.store_raw store genesis.block bytes >>= fun _created ->
|
||||
Raw_operation_list.Locked.store_all store genesis.block [] [] >>= fun () ->
|
||||
Lwt.return header
|
||||
|
||||
let store_testnet_genesis store genesis =
|
||||
@ -434,7 +560,7 @@ module Raw_block_header = struct
|
||||
predecessor = genesis.block ;
|
||||
timestamp = genesis.time ;
|
||||
fitness = [] ;
|
||||
operations = [] ;
|
||||
operations = Operation_list_list_hash.empty ;
|
||||
} in
|
||||
let bytes =
|
||||
Data_encoding.Binary.to_bytes Store.Block_header.encoding {
|
||||
@ -442,6 +568,7 @@ module Raw_block_header = struct
|
||||
proto = MBytes.create 0 ;
|
||||
} in
|
||||
Locked.store_raw store genesis.block bytes >>= fun _created ->
|
||||
Raw_operation_list.Locked.store_all store genesis.block [] [] >>= fun () ->
|
||||
Lwt.return shell
|
||||
|
||||
end
|
||||
@ -569,8 +696,8 @@ module Block_header = struct
|
||||
net_id: Net_id.t ;
|
||||
predecessor: Block_hash.t ;
|
||||
timestamp: Time.t ;
|
||||
operations: Operation_list_list_hash.t ;
|
||||
fitness: MBytes.t list ;
|
||||
operations: Operation_hash.t list ;
|
||||
}
|
||||
|
||||
type t = Store.Block_header.t = {
|
||||
@ -598,6 +725,9 @@ module Block_header = struct
|
||||
| Some _ | None -> Lwt.return_none
|
||||
let read_pred_exn = wrap_not_found read_pred_opt
|
||||
|
||||
let read_operations s k =
|
||||
Raw_operation_list.read_all s.block_header_store k
|
||||
|
||||
let mark_invalid net hash errors =
|
||||
mark_invalid net hash errors >>= fun marked ->
|
||||
if not marked then
|
||||
@ -678,6 +808,45 @@ module Block_header = struct
|
||||
|
||||
end
|
||||
|
||||
module Operation_list = struct
|
||||
|
||||
type store = net
|
||||
type key = Block_hash.t * int
|
||||
type value = Operation_hash.t list * Operation_list_list_hash.path
|
||||
|
||||
module Locked = Raw_operation_list.Locked
|
||||
|
||||
let atomic1 f s =
|
||||
Shared.use s.block_header_store f
|
||||
let atomic2 f s k =
|
||||
Shared.use s.block_header_store (fun s -> f s k)
|
||||
let atomic3 f s k v =
|
||||
Shared.use s.block_header_store (fun s -> f s k v)
|
||||
let atomic4 f s k v1 v2 =
|
||||
Shared.use s.block_header_store (fun s -> f s k v1 v2)
|
||||
|
||||
let known = atomic2 Locked.known
|
||||
let read = atomic2 Locked.read
|
||||
let read_opt = atomic2 Locked.read_opt
|
||||
let read_exn = atomic2 Locked.read_exn
|
||||
let store = atomic3 Locked.store
|
||||
let remove = atomic2 Locked.remove
|
||||
|
||||
let store_all s k v =
|
||||
Shared.use s.block_header_store begin fun s ->
|
||||
let h = List.map Operation_list_hash.compute v in
|
||||
Locked.store_all s k h v
|
||||
end
|
||||
let read_all = atomic2 Locked.read_all
|
||||
let read_all_exn = atomic2 Locked.read_all_exn
|
||||
|
||||
let read_count = atomic2 Locked.read_count
|
||||
let read_count_opt = atomic2 Locked.read_count_opt
|
||||
let read_count_exn = atomic2 Locked.read_count_exn
|
||||
let store_count = atomic3 Locked.store_count
|
||||
|
||||
end
|
||||
|
||||
module Raw_net = struct
|
||||
|
||||
let build
|
||||
@ -741,7 +910,7 @@ module Raw_net = struct
|
||||
Lwt.return context
|
||||
end >>= fun context ->
|
||||
build_valid_block
|
||||
genesis.block header context genesis.time
|
||||
genesis.block header [] context genesis.time
|
||||
Block_hash.Set.empty Block_hash.Set.empty >>= fun genesis_block ->
|
||||
Lwt.return @@
|
||||
build
|
||||
@ -765,7 +934,8 @@ module Valid_block = struct
|
||||
pred: Block_hash.t ;
|
||||
timestamp: Time.t ;
|
||||
fitness: Fitness.fitness ;
|
||||
operations: Operation_hash.t list ;
|
||||
operations_hash: Operation_list_list_hash.t ;
|
||||
operations: Operation_hash.t list list ;
|
||||
discovery_time: Time.t ;
|
||||
protocol_hash: Protocol_hash.t ;
|
||||
protocol: (module Updater.REGISTRED_PROTOCOL) option ;
|
||||
@ -784,7 +954,7 @@ module Valid_block = struct
|
||||
let known { context_index } hash =
|
||||
Context.exists context_index hash
|
||||
|
||||
let raw_read block time chain_store context_index hash =
|
||||
let raw_read block operations time chain_store context_index hash =
|
||||
Context.checkout context_index hash >>= function
|
||||
| None ->
|
||||
fail (Unknown_context hash)
|
||||
@ -793,11 +963,12 @@ module Valid_block = struct
|
||||
>>= fun successors ->
|
||||
Store.Chain.Invalid_successors.read_all (chain_store, hash)
|
||||
>>= fun invalid_successors ->
|
||||
build_valid_block hash block context time successors invalid_successors >>= fun block ->
|
||||
build_valid_block hash block operations
|
||||
context time successors invalid_successors >>= fun block ->
|
||||
return block
|
||||
|
||||
let raw_read_exn block time chain_store context_index hash =
|
||||
raw_read block time chain_store context_index hash >>= function
|
||||
let raw_read_exn block operations time chain_store context_index hash =
|
||||
raw_read block operations time chain_store context_index hash >>= function
|
||||
| Error _ -> Lwt.fail Not_found
|
||||
| Ok data -> Lwt.return data
|
||||
|
||||
@ -806,7 +977,8 @@ module Valid_block = struct
|
||||
| None | Some { Time.data = Error _ } ->
|
||||
fail (Unknown_block hash)
|
||||
| Some { Time.data = Ok block ; time } ->
|
||||
raw_read block
|
||||
Block_header.read_operations net hash >>=? fun operations ->
|
||||
raw_read block operations
|
||||
time net_state.chain_store net_state.context_index hash
|
||||
|
||||
let read_opt net net_state hash =
|
||||
@ -834,7 +1006,10 @@ module Valid_block = struct
|
||||
fail_unless
|
||||
(Fitness.equal fitness block.Store.Block_header.shell.fitness)
|
||||
(Invalid_fitness
|
||||
(block.Store.Block_header.shell.fitness, fitness)) >>=? fun () ->
|
||||
{ block = hash ;
|
||||
expected = block.Store.Block_header.shell.fitness ;
|
||||
found = fitness ;
|
||||
}) >>=? fun () ->
|
||||
begin (* Patch context about the associated test network. *)
|
||||
Context.read_and_reset_fork_test_network
|
||||
context >>= fun (fork, context) ->
|
||||
@ -862,6 +1037,8 @@ module Valid_block = struct
|
||||
Raw_block_header.Locked.mark_valid
|
||||
block_header_store hash >>= fun _marked ->
|
||||
(* TODO fail if the block was previously stored ... ??? *)
|
||||
Operation_list.Locked.read_all
|
||||
block_header_store hash >>=? fun operations ->
|
||||
(* Let's commit the context. *)
|
||||
Context.commit hash context >>= fun () ->
|
||||
(* Update the chain state. *)
|
||||
@ -873,7 +1050,7 @@ module Valid_block = struct
|
||||
(store, predecessor) hash >>= fun () ->
|
||||
(* Build the `valid_block` value. *)
|
||||
raw_read_exn
|
||||
block discovery_time
|
||||
block operations discovery_time
|
||||
net_state.chain_store net_state.context_index hash >>= fun valid_block ->
|
||||
Watcher.notify valid_block_watcher valid_block ;
|
||||
Lwt.return (Ok valid_block)
|
||||
@ -1058,11 +1235,14 @@ module Valid_block = struct
|
||||
lwt_debug "pop_block %a" Block_hash.pp_short hash >>= fun () ->
|
||||
Raw_block_header.read_exn
|
||||
block_header_store hash >>= fun { shell } ->
|
||||
Raw_operation_list.read_all_exn
|
||||
block_header_store hash >>= fun operations ->
|
||||
let operations = List.concat operations in
|
||||
Lwt_list.iter_p
|
||||
(fun h ->
|
||||
Raw_operation.Locked.unmark operation_store h >>= fun _ ->
|
||||
Lwt.return_unit)
|
||||
shell.operations >>= fun () ->
|
||||
operations >>= fun () ->
|
||||
Store.Chain.In_chain_insertion_time.remove
|
||||
(state.chain_store, hash) >>= fun () ->
|
||||
Store.Chain.Successor_in_chain.remove
|
||||
@ -1076,11 +1256,14 @@ module Valid_block = struct
|
||||
Store.Chain.Successor_in_chain.store
|
||||
(state.chain_store,
|
||||
shell.Store.Block_header.predecessor) hash >>= fun () ->
|
||||
Raw_operation_list.read_all_exn
|
||||
block_header_store hash >>= fun operations ->
|
||||
let operations = List.concat operations in
|
||||
Lwt_list.iter_p
|
||||
(fun h ->
|
||||
Raw_operation.Locked.mark_valid operation_store h >>= fun _ ->
|
||||
Lwt.return_unit)
|
||||
shell.operations
|
||||
operations
|
||||
in
|
||||
let time = Time.now () in
|
||||
new_blocks
|
||||
@ -1165,7 +1348,7 @@ module Net = struct
|
||||
Block_header.Locked.read_discovery_time block_header_store
|
||||
genesis_hash >>=? fun genesis_discovery_time ->
|
||||
Valid_block.Locked.raw_read
|
||||
genesis_shell_header genesis_discovery_time
|
||||
genesis_shell_header [] genesis_discovery_time
|
||||
chain_store context_index genesis_hash >>=? fun genesis_block ->
|
||||
return @@
|
||||
Raw_net.build
|
||||
|
@ -36,7 +36,12 @@ val read:
|
||||
(** {2 Errors} **************************************************************)
|
||||
|
||||
type error +=
|
||||
| Invalid_fitness of Fitness.fitness * Fitness.fitness
|
||||
| Invalid_fitness of { block: Block_hash.t ;
|
||||
expected: Fitness.fitness ;
|
||||
found: Fitness.fitness }
|
||||
| Invalid_operations of { block: Block_hash.t ;
|
||||
expected: Operation_list_list_hash.t ;
|
||||
found: Operation_hash.t list list }
|
||||
| Unknown_network of Store.Net_id.t
|
||||
| Unknown_operation of Operation_hash.t
|
||||
| Unknown_block of Block_hash.t
|
||||
@ -119,7 +124,7 @@ module type DATA_STORE = sig
|
||||
returns [false] when the value is already stored, or [true]
|
||||
otherwise. For a given value, only one call to `store` (or an
|
||||
equivalent call to `store_raw`) might return [true]. *)
|
||||
val store: store -> value -> bool Lwt.t
|
||||
val store: store -> key -> value -> bool Lwt.t
|
||||
|
||||
(** Store a value in the local database (unparsed data). It returns
|
||||
[Ok None] when the data is already stored, or [Ok (Some (hash,
|
||||
@ -143,8 +148,8 @@ module Block_header : sig
|
||||
net_id: Net_id.t ;
|
||||
predecessor: Block_hash.t ;
|
||||
timestamp: Time.t ;
|
||||
operations: Operation_list_list_hash.t ;
|
||||
fitness: MBytes.t list ;
|
||||
operations: Operation_hash.t list ;
|
||||
}
|
||||
|
||||
type t = Store.Block_header.t = {
|
||||
@ -205,6 +210,31 @@ module Block_header : sig
|
||||
|
||||
end
|
||||
|
||||
module Operation_list : sig
|
||||
|
||||
type store = Net.t
|
||||
type key = Block_hash.t * int
|
||||
type value = Operation_hash.t list * Operation_list_list_hash.path
|
||||
|
||||
val known: store -> key -> bool Lwt.t
|
||||
val read: store -> key -> value tzresult Lwt.t
|
||||
val read_opt: store -> key -> value option Lwt.t
|
||||
val read_exn: store -> key -> value Lwt.t
|
||||
val store: store -> key -> value -> bool Lwt.t
|
||||
val remove: store -> key -> bool Lwt.t
|
||||
|
||||
val read_count: store -> Block_hash.t -> int tzresult Lwt.t
|
||||
val read_count_opt: store -> Block_hash.t -> int option Lwt.t
|
||||
val read_count_exn: store -> Block_hash.t -> int Lwt.t
|
||||
val store_count: store -> Block_hash.t -> int -> unit Lwt.t
|
||||
|
||||
val read_all:
|
||||
store -> Block_hash.t -> Operation_hash.t list list tzresult Lwt.t
|
||||
val store_all:
|
||||
store -> Block_hash.t -> Operation_hash.t list list -> unit Lwt.t
|
||||
|
||||
end
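A hedged usage sketch for this interface (the `State.Net.t` value, block hash and operation lists are placeholders, and `open Error_monad` is assumed for `>>=?`/`return`): store all the lists once, then read one list back together with its Merkle path, or read everything at once.
open Error_monad

let save_and_reload (net : State.Net.t) (block_hash : Block_hash.t)
    (ops : Operation_hash.t list list) =
  (* Persist every operation list of the block; the per-list Merkle
     paths are computed and stored alongside. *)
  State.Operation_list.store_all net block_hash ops >>= fun () ->
  (* Read a single list back together with its path... *)
  State.Operation_list.read net (block_hash, 0) >>=? fun (first_list, _path) ->
  (* ...or read everything at once. *)
  State.Operation_list.read_all net block_hash >>=? fun all ->
  return (first_list, all)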
|
||||
|
||||
|
||||
(** {2 Valid block} ***********************************************************)
|
||||
|
||||
@ -223,8 +253,9 @@ module Valid_block : sig
|
||||
(** The date at which this block has been forged. *)
|
||||
fitness: Protocol.fitness ;
|
||||
(** The (validated) score of the block. *)
|
||||
operations: Operation_hash.t list ;
|
||||
(** The sequence of operations. *)
|
||||
operations_hash: Operation_list_list_hash.t ;
|
||||
operations: Operation_hash.t list list ;
|
||||
(** The sequence of operations and its (Merkle-)hash. *)
|
||||
discovery_time: Time.t ;
|
||||
(** The date at which the block was discovered on the P2P network. *)
|
||||
protocol_hash: Protocol_hash.t ;
|
||||
|
@ -15,7 +15,8 @@ type worker = {
|
||||
get_exn: State.Net_id.t -> t Lwt.t ;
|
||||
deactivate: t -> unit Lwt.t ;
|
||||
inject_block:
|
||||
?force:bool -> MBytes.t ->
|
||||
?force:bool ->
|
||||
MBytes.t -> Operation_hash.t list list ->
|
||||
(Block_hash.t * State.Valid_block.t tzresult Lwt.t) tzresult Lwt.t ;
|
||||
notify_block: Block_hash.t -> Store.Block_header.t -> unit Lwt.t ;
|
||||
shutdown: unit -> unit Lwt.t ;
|
||||
@ -152,9 +153,11 @@ let apply_block net db
|
||||
>>= fun () ->
|
||||
lwt_log_info "validation of %a: looking for dependencies..."
|
||||
Block_hash.pp_short hash >>= fun () ->
|
||||
Distributed_db.Operation_list.fetch
|
||||
db (hash, 0) block.shell.operations >>= fun operation_hashes ->
|
||||
Lwt_list.map_p
|
||||
(fun op -> Distributed_db.Operation.fetch db op)
|
||||
block.shell.operations >>= fun operations ->
|
||||
operation_hashes >>= fun operations ->
|
||||
lwt_debug "validation of %a: found operations"
|
||||
Block_hash.pp_short hash >>= fun () ->
|
||||
begin (* Are we validating a block in an expired test network ? *)
|
||||
@ -194,7 +197,7 @@ let apply_block net db
|
||||
(fun op_hash raw ->
|
||||
Lwt.return (Proto.parse_operation op_hash raw)
|
||||
|> trace (Invalid_operation op_hash))
|
||||
block.Store.Block_header.shell.operations
|
||||
operation_hashes
|
||||
operations >>=? fun parsed_operations ->
|
||||
lwt_debug "validation of %a: applying block..."
|
||||
Block_hash.pp_short hash >>= fun () ->
|
||||
@ -290,22 +293,27 @@ module Context_db = struct
|
||||
match data with
|
||||
| Ok data ->
|
||||
Distributed_db.Block_header.commit net_db hash >>= fun () ->
|
||||
Distributed_db.Operation_list.commit_all
|
||||
net_db hash 1 >>= fun () ->
|
||||
begin
|
||||
State.Valid_block.store net_state hash data >>=? function
|
||||
| None ->
|
||||
State.Valid_block.read net_state hash >>=? fun block ->
|
||||
Lwt_list.iter_p (fun hash ->
|
||||
Distributed_db.Operation.commit net_db hash)
|
||||
Lwt_list.iter_p
|
||||
(Lwt_list.iter_p (fun hash ->
|
||||
Distributed_db.Operation.commit net_db hash))
|
||||
block.operations >>= fun () ->
|
||||
return (Ok block, false)
|
||||
| Some block ->
|
||||
Lwt_list.iter_p (fun hash ->
|
||||
Distributed_db.Operation.commit net_db hash)
|
||||
Lwt_list.iter_p
|
||||
(Lwt_list.iter_p (fun hash ->
|
||||
Distributed_db.Operation.commit net_db hash))
|
||||
block.operations >>= fun () ->
|
||||
return (Ok block, true)
|
||||
end
|
||||
| Error err ->
|
||||
State.Block_header.mark_invalid net_state hash err >>= fun changed ->
|
||||
State.Block_header.mark_invalid
|
||||
net_state hash err >>= fun changed ->
|
||||
return (Error err, changed)
|
||||
end >>= function
|
||||
| Ok (block, changed) ->
|
||||
@ -704,9 +712,25 @@ let create_worker state db =
|
||||
validators [] in
|
||||
Lwt.join (maintenance_worker :: validators) in
|
||||
|
||||
let inject_block ?(force = false) bytes =
|
||||
Distributed_db.inject_block db bytes >>=? fun (hash, block) ->
|
||||
let inject_block ?(force = false) bytes operations =
|
||||
Distributed_db.inject_block db bytes operations >>=? fun (hash, block) ->
|
||||
get block.shell.net_id >>=? fun net ->
|
||||
(*
|
||||
Lwt_list.filter_map_s
|
||||
(fun bytes ->
|
||||
let hash = Operation_hash.hash_bytes [bytes] in
|
||||
match Data_encoding.
|
||||
Distributed_db.Operation.inject net.net_db hash bytes >>= function
|
||||
| false -> Lwt.return_none
|
||||
| true ->
|
||||
if List.exists
|
||||
(List.exists (Operation_hash.equal hash))
|
||||
operations then
|
||||
Lwt.return (Some hash)
|
||||
else
|
||||
Lwt.return_none)
|
||||
injected_operations >>= fun injected_operations ->
|
||||
*)
|
||||
let validation =
|
||||
State.Valid_block.Current.head net.net >>= fun head ->
|
||||
if force
|
||||
|
@ -32,7 +32,8 @@ val fetch_block:
|
||||
t -> Block_hash.t -> State.Valid_block.t tzresult Lwt.t
|
||||
|
||||
val inject_block:
|
||||
worker -> ?force:bool -> MBytes.t ->
|
||||
worker -> ?force:bool ->
|
||||
MBytes.t -> Operation_hash.t list list ->
|
||||
(Block_hash.t * State.Valid_block.t tzresult Lwt.t) tzresult Lwt.t
|
||||
|
||||
val prevalidator: t -> Prevalidator.t
|
||||
|
@ -33,12 +33,12 @@ type shell_block = Store.Block_header.shell_header =
|
||||
(** The preceding block in the chain. *)
|
||||
timestamp: Time.t ;
|
||||
(** The date at which this block has been forged. *)
|
||||
operations: Operation_list_list_hash.t ;
|
||||
(** The sequence of operations. *)
|
||||
fitness: MBytes.t list ;
|
||||
(** The announced score of the block. As a sequence of sequences
|
||||
of unsigned bytes. Ordered by length and then by contents
|
||||
lexicographically. *)
|
||||
operations: Operation_hash.t list ;
|
||||
(** The sequence of operations. *)
|
||||
}
|
||||
|
||||
type raw_block = Store.Block_header.t = {
|
||||
|
@ -40,12 +40,12 @@ type shell_block = Store.Block_header.shell_header = {
|
||||
(** The preceding block in the chain. *)
|
||||
timestamp: Time.t ;
|
||||
(** The date at which this block has been forged. *)
|
||||
operations: Operation_list_list_hash.t ;
|
||||
(** The sequence of operations. *)
|
||||
fitness: MBytes.t list ;
|
||||
(** The announced score of the block. As a sequence of sequences
|
||||
of unsigned bytes. Ordered by length and then by contents
|
||||
lexicographically. *)
|
||||
operations: Operation_hash.t list ;
|
||||
(** The sequence of operations. *)
|
||||
}
|
||||
let shell_block_encoding = Store.Block_header.shell_header_encoding
|
||||
|
||||
|
@ -31,12 +31,12 @@ type shell_block = Store.Block_header.shell_header = {
|
||||
(** The preceding block in the chain. *)
|
||||
timestamp: Time.t ;
|
||||
(** The date at which this block has been forged. *)
|
||||
operations: Operation_list_list_hash.t ;
|
||||
(** The sequence of operations. *)
|
||||
fitness: MBytes.t list ;
|
||||
(** The announced score of the block. As a sequence of sequences
|
||||
of unsigned bytes. Ordered by length and then by contents
|
||||
lexicographically. *)
|
||||
operations: Operation_hash.t list ;
|
||||
(** The sequence of operations. *)
|
||||
}
|
||||
val shell_block_encoding: shell_block Data_encoding.t
|
||||
|
||||
|
@ -570,7 +570,7 @@ module Helpers = struct
|
||||
(req "predecessor" Block_hash.encoding)
|
||||
(req "timestamp" Timestamp.encoding)
|
||||
(req "fitness" Fitness.encoding)
|
||||
(req "operations" (list Operation_hash.encoding))
|
||||
(req "operations" Operation_list_list_hash.encoding)
|
||||
(req "level" Raw_level.encoding)
|
||||
(req "priority" int31)
|
||||
(req "nonce_hash" Nonce_hash.encoding)
|
||||
|
@ -71,6 +71,20 @@ module type HASH = sig
|
||||
|
||||
end
|
||||
|
||||
module type MERKLE_TREE = sig
|
||||
type elt
|
||||
include HASH
|
||||
val compute: elt list -> t
|
||||
val empty: t
|
||||
type path =
|
||||
| Left of path * t
|
||||
| Right of t * path
|
||||
| Op
|
||||
val compute_path: elt list -> int -> path
|
||||
val check_path: path -> elt -> t * int
|
||||
val path_encoding: path Data_encoding.t
|
||||
end
|
||||
|
||||
(** {2 Building Hashes} *******************************************************)
|
||||
|
||||
(** The parameters for creating a new Hash type using
|
||||
@ -111,5 +125,12 @@ module Block_hash : HASH
|
||||
(** Operations hashes / IDs. *)
|
||||
module Operation_hash : HASH
|
||||
|
||||
(** List of operations hashes / IDs. *)
|
||||
module Operation_list_hash :
|
||||
MERKLE_TREE with type elt = Operation_hash.t
|
||||
|
||||
module Operation_list_list_hash :
|
||||
MERKLE_TREE with type elt = Operation_list_hash.t
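A hedged illustration of the membership-proof round trip these instances provide (hypothetical operation hashes; the node library is assumed to be linked): the path computed for index `i` recombines with the `i`-th element to give back the root and the element's position.
let verify_membership (ops : Operation_hash.t list) (i : int) =
  (* Root hash committing to the whole list. *)
  let root = Operation_list_hash.compute ops in
  (* Compact proof that the i-th element belongs to that root. *)
  let proof = Operation_list_hash.compute_path ops i in
  (* Recomputing from the element and the proof must return the same
     root and recover the element's position. *)
  let root', pos = Operation_list_hash.check_path proof (List.nth ops i) in
  root' = root && pos = i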
|
||||
|
||||
(** Protocol versions / source hashes. *)
|
||||
module Protocol_hash : HASH
|
||||
|
@ -27,12 +27,12 @@ type shell_block = {
|
||||
(** The preceding block in the chain. *)
|
||||
timestamp: Time.t ;
|
||||
(** The date at which this block has been forged. *)
|
||||
operations: Operation_list_list_hash.t ;
|
||||
(** The sequence of operations. *)
|
||||
fitness: MBytes.t list ;
|
||||
(** The announced score of the block. As a sequence of sequences
|
||||
of unsigned bytes. Ordered by length and then by contents
|
||||
lexicographically. *)
|
||||
operations: Operation_hash.t list ;
|
||||
(** The sequence of operations. *)
|
||||
}
|
||||
val shell_block_encoding: shell_block Data_encoding.t
|
||||
|
||||
|
@ -53,6 +53,9 @@ let int64_to_bytes i =
|
||||
MBytes.set_int64 b 0 i;
|
||||
b
|
||||
|
||||
let operations =
|
||||
Operation_list_list_hash.compute [Operation_list_hash.empty]
|
||||
|
||||
let rpc_services : Context.t RPC.directory =
|
||||
let dir = RPC.empty in
|
||||
let dir =
|
||||
@ -60,8 +63,8 @@ let rpc_services : Context.t RPC.directory =
|
||||
dir
|
||||
(Forge.block RPC.Path.root)
|
||||
(fun _ctxt ((net_id, predecessor, timestamp, fitness), command) ->
|
||||
let shell = { Updater.net_id ; predecessor ; timestamp ;
|
||||
fitness ; operations = [] } in
|
||||
let shell = { Updater.net_id ; predecessor ; timestamp ; fitness ;
|
||||
operations } in
|
||||
let bytes = Data.Command.forge shell command in
|
||||
RPC.Answer.return bytes) in
|
||||
dir
|
||||
|
@ -292,6 +292,8 @@ module Prefix = struct
|
||||
(* 32 *)
|
||||
let block_hash = "\001\052" (* B(51) *)
|
||||
let operation_hash = "\005\116" (* o(51) *)
|
||||
let operation_list_hash = "\133\233" (* Lo(52) *)
|
||||
let operation_list_list_hash = "\029\159\109" (* LLo(53) *)
|
||||
let protocol_hash = "\002\170" (* P(51) *)
|
||||
|
||||
(* 20 *)
|
||||
|
@ -13,6 +13,8 @@ module Prefix : sig
|
||||
|
||||
val block_hash: string
|
||||
val operation_hash: string
|
||||
val operation_list_hash: string
|
||||
val operation_list_list_hash: string
|
||||
val protocol_hash: string
|
||||
val ed25519_public_key_hash: string
|
||||
val cryptobox_public_key_hash: string
|
||||
|
@ -98,6 +98,34 @@ module type INTERNAL_HASH = sig
|
||||
module Table : Hashtbl.S with type key = t
|
||||
end
|
||||
|
||||
module type INTERNAL_MERKLE_TREE = sig
|
||||
type elt
|
||||
include INTERNAL_HASH
|
||||
val compute: elt list -> t
|
||||
val empty: t
|
||||
type path =
|
||||
| Left of path * t
|
||||
| Right of t * path
|
||||
| Op
|
||||
val compute_path: elt list -> int -> path
|
||||
val check_path: path -> elt -> t * int
|
||||
val path_encoding: path Data_encoding.t
|
||||
end
|
||||
|
||||
module type MERKLE_TREE = sig
|
||||
type elt
|
||||
include HASH
|
||||
val compute: elt list -> t
|
||||
val empty: t
|
||||
type path =
|
||||
| Left of path * t
|
||||
| Right of t * path
|
||||
| Op
|
||||
val compute_path: elt list -> int -> path
|
||||
val check_path: path -> elt -> t * int
|
||||
val path_encoding: path Data_encoding.t
|
||||
end
|
||||
|
||||
module type Name = sig
|
||||
val name: string
|
||||
val title: string
|
||||
@ -297,36 +325,148 @@ module Make_Blake2B (R : sig
|
||||
|
||||
end
|
||||
|
||||
(*-- Hash sets and maps -----------------------------------------------------*)
|
||||
module Generic_Merkle_tree (H : sig
|
||||
type t
|
||||
type elt
|
||||
val encoding : t Data_encoding.t
|
||||
val empty : t
|
||||
val leaf : elt -> t
|
||||
val node : t -> t -> t
|
||||
end) = struct
|
||||
|
||||
module Hash_set (Hash : HASH) = struct
|
||||
include Set.Make (Hash)
|
||||
let encoding =
|
||||
Data_encoding.conv
|
||||
elements
|
||||
(fun l -> List.fold_left (fun m x -> add x m) empty l)
|
||||
Data_encoding.(list Hash.encoding)
|
||||
let rec step a n =
|
||||
let m = (n+1) / 2 in
|
||||
for i = 0 to m - 1 do
|
||||
a.(i) <- H.node a.(2*i) a.(2*i+1)
|
||||
done ;
|
||||
a.(m) <- H.node a.(n) a.(n) ;
|
||||
if m = 1 then
|
||||
a.(0)
|
||||
else if m mod 2 = 0 then
|
||||
step a m
|
||||
else begin
|
||||
a.(m+1) <- a.(m) ;
|
||||
step a (m+1)
|
||||
end
|
||||
|
||||
module Hash_map (Hash : HASH) = struct
|
||||
include Map.Make (Hash)
|
||||
let encoding arg_encoding =
|
||||
Data_encoding.conv
|
||||
bindings
|
||||
(fun l -> List.fold_left (fun m (k,v) -> add k v m) empty l)
|
||||
Data_encoding.(list (tup2 Hash.encoding arg_encoding))
|
||||
let empty = H.empty
|
||||
|
||||
let compute xs =
|
||||
match xs with
|
||||
| [] -> H.empty
|
||||
| [x] -> H.leaf x
|
||||
| _ :: _ :: _ ->
|
||||
let last = Utils.list_last_exn xs in
|
||||
let n = List.length xs in
|
||||
let a = Array.make (n+1) (H.leaf last) in
|
||||
List.iteri (fun i x -> a.(i) <- H.leaf x) xs ;
|
||||
step a n
|
||||
|
||||
type path =
|
||||
| Left of path * H.t
|
||||
| Right of H.t * path
|
||||
| Op
|
||||
|
||||
let rec step_path a n p j =
|
||||
let m = (n+1) / 2 in
|
||||
let p = if j mod 2 = 0 then Left (p, a.(j+1)) else Right (a.(j-1), p) in
|
||||
for i = 0 to m - 1 do
|
||||
a.(i) <- H.node a.(2*i) a.(2*i+1)
|
||||
done ;
|
||||
a.(m) <- H.node a.(n) a.(n) ;
|
||||
if m = 1 then
|
||||
p
|
||||
else if m mod 2 = 0 then
|
||||
step_path a m p (j/2)
|
||||
else begin
|
||||
a.(m+1) <- a.(m) ;
|
||||
step_path a (m+1) p (j/2)
|
||||
end
|
||||
|
||||
module Hash_table (Hash : MINIMAL_HASH)
|
||||
: Hashtbl.S with type key = Hash.t
|
||||
= Hashtbl.Make (struct
|
||||
type t = Hash.t
|
||||
let equal = Hash.equal
|
||||
let hash v =
|
||||
let raw_hash = Hash.to_string v in
|
||||
let int64_hash = EndianString.BigEndian.get_int64 raw_hash 0 in
|
||||
Int64.to_int int64_hash
|
||||
let compute_path xs i =
|
||||
match xs with
|
||||
| [] -> invalid_arg "compute_path"
|
||||
| [_] -> Op
|
||||
| _ :: _ :: _ ->
|
||||
let last = Utils.list_last_exn xs in
|
||||
let n = List.length xs in
|
||||
if i < 0 || n <= i then invalid_arg "compute_path" ;
|
||||
let a = Array.make (n+1) (H.leaf last) in
|
||||
List.iteri (fun i x -> a.(i) <- H.leaf x) xs ;
|
||||
step_path a n Op i
|
||||
|
||||
let rec check_path p h =
|
||||
match p with
|
||||
| Op ->
|
||||
H.leaf h, 1, 0
|
||||
| Left (p, r) ->
|
||||
let l, s, pos = check_path p h in
|
||||
H.node l r, s * 2, pos
|
||||
| Right (l, p) ->
|
||||
let r, s, pos = check_path p h in
|
||||
H.node l r, s * 2, pos + s
|
||||
|
||||
let check_path p h =
|
||||
let h, _, pos = check_path p h in
|
||||
h, pos
|
||||
|
||||
let path_encoding =
|
||||
let open Data_encoding in
|
||||
mu "path"
|
||||
(fun path_encoding ->
|
||||
union [
|
||||
case ~tag:240
|
||||
(obj2
|
||||
(req "path" path_encoding)
|
||||
(req "right" H.encoding))
|
||||
(function Left (p, r) -> Some (p, r) | _ -> None)
|
||||
(fun (p, r) -> Left (p, r)) ;
|
||||
case ~tag:15
|
||||
(obj2
|
||||
(req "left" H.encoding)
|
||||
(req "path" path_encoding))
|
||||
(function Right (r, p) -> Some (r, p) | _ -> None)
|
||||
(fun (r, p) -> Right (r, p)) ;
|
||||
case ~tag:0
|
||||
unit
|
||||
(function Op -> Some () | _ -> None)
|
||||
(fun () -> Op)
|
||||
])
|
||||
|
||||
end
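As a worked illustration of the padding scheme in `compute`/`step` above, and in the spirit of the unit test added later in this commit: when the number of leaves is not even, the last leaf is duplicated to complete the pair. A self-contained sketch over a toy tree (no hashing involved):
type tree = Empty | Leaf of int | Node of tree * tree

module T = Hash.Generic_Merkle_tree (struct
    type t = tree
    type elt = int
    let encoding =
      (* Fake encoding, enough to satisfy the functor argument. *)
      Data_encoding.conv (fun _ -> 0) (fun _ -> Empty) Data_encoding.int31
    let empty = Empty
    let leaf i = Leaf i
    let node l r = Node (l, r)
  end)

let () =
  (* Three leaves: the last one is duplicated to complete the pair. *)
  assert (T.compute [1; 2; 3]
          = Node (Node (Leaf 1, Leaf 2), Node (Leaf 3, Leaf 3))) ;
  assert (T.compute [] = Empty)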
|
||||
|
||||
module Make_merkle_tree
|
||||
(R : sig
|
||||
val register_encoding:
|
||||
prefix: string ->
|
||||
length:int ->
|
||||
to_raw: ('a -> string) ->
|
||||
of_raw: (string -> 'a option) ->
|
||||
wrap: ('a -> Base58.data) ->
|
||||
'a Base58.encoding
|
||||
end)
|
||||
(K : PrefixedName)
|
||||
(Contents: sig
|
||||
type t
|
||||
val to_bytes: t -> MBytes.t
|
||||
end) = struct
|
||||
|
||||
include Make_Blake2B (R) (K)
|
||||
|
||||
type elt = Contents.t
|
||||
|
||||
let empty = hash_bytes []
|
||||
|
||||
include Generic_Merkle_tree(struct
|
||||
type nonrec t = t
|
||||
type nonrec elt = elt
|
||||
let encoding = encoding
|
||||
let empty = empty
|
||||
let leaf x = hash_bytes [Contents.to_bytes x]
|
||||
let node x y = hash_bytes [to_bytes x; to_bytes y]
|
||||
end)
|
||||
|
||||
end
|
||||
|
||||
(*-- Pre-instanciated hashes ------------------------------------------------*)
|
||||
|
||||
@ -346,6 +486,22 @@ module Operation_hash =
|
||||
let size = None
|
||||
end)
|
||||
|
||||
module Operation_list_hash =
|
||||
Make_merkle_tree (Base58) (struct
|
||||
let name = "Operation_list_hash"
|
||||
let title = "A list of operations"
|
||||
let b58check_prefix = Base58.Prefix.operation_list_hash
|
||||
let size = None
|
||||
end) (Operation_hash)
|
||||
|
||||
module Operation_list_list_hash =
|
||||
Make_merkle_tree (Base58) (struct
|
||||
let name = "Operation_list_list_hash"
|
||||
let title = "A list of list of operations"
|
||||
let b58check_prefix = Base58.Prefix.operation_list_list_hash
|
||||
let size = None
|
||||
end) (Operation_list_hash)
|
||||
|
||||
module Protocol_hash =
|
||||
Make_Blake2B (Base58) (struct
|
||||
let name = "Protocol_hash"
|
||||
@ -364,4 +520,6 @@ module Generic_hash =
|
||||
let () =
|
||||
Base58.check_encoded_prefix Block_hash.b58check_encoding "B" 51 ;
|
||||
Base58.check_encoded_prefix Operation_hash.b58check_encoding "o" 51 ;
|
||||
Base58.check_encoded_prefix Operation_list_hash.b58check_encoding "Lo" 52 ;
|
||||
Base58.check_encoded_prefix Operation_list_list_hash.b58check_encoding "LLo" 53 ;
|
||||
Base58.check_encoded_prefix Protocol_hash.b58check_encoding "P" 51
|
||||
|
@ -90,6 +90,34 @@ module type INTERNAL_HASH = sig
|
||||
module Table : Hashtbl.S with type key = t
|
||||
end
|
||||
|
||||
module type INTERNAL_MERKLE_TREE = sig
|
||||
type elt
|
||||
include INTERNAL_HASH
|
||||
val compute: elt list -> t
|
||||
val empty: t
|
||||
type path =
|
||||
| Left of path * t
|
||||
| Right of t * path
|
||||
| Op
|
||||
val compute_path: elt list -> int -> path
|
||||
val check_path: path -> elt -> t * int
|
||||
val path_encoding: path Data_encoding.t
|
||||
end
|
||||
|
||||
module type MERKLE_TREE = sig
|
||||
type elt
|
||||
include HASH
|
||||
val compute: elt list -> t
|
||||
val empty: t
|
||||
type path =
|
||||
| Left of path * t
|
||||
| Right of t * path
|
||||
| Op
|
||||
val compute_path: elt list -> int -> path
|
||||
val check_path: path -> elt -> t * int
|
||||
val path_encoding: path Data_encoding.t
|
||||
end
|
||||
|
||||
(** {2 Building Hashes} *******************************************************)
|
||||
|
||||
(** The parameters for creating a new Hash type using
|
||||
@ -136,7 +164,33 @@ end
|
||||
(** Operations hashes / IDs. *)
|
||||
module Operation_hash : INTERNAL_HASH
|
||||
|
||||
(** List of operations hashes / IDs. *)
|
||||
module Operation_list_hash :
|
||||
INTERNAL_MERKLE_TREE with type elt = Operation_hash.t
|
||||
|
||||
module Operation_list_list_hash :
|
||||
INTERNAL_MERKLE_TREE with type elt = Operation_list_hash.t
|
||||
|
||||
(** Protocol versions / source hashes. *)
|
||||
module Protocol_hash : INTERNAL_HASH
|
||||
|
||||
module Generic_hash : INTERNAL_MINIMAL_HASH
|
||||
|
||||
(**/**)
|
||||
|
||||
module Generic_Merkle_tree (H : sig
|
||||
type t
|
||||
type elt
|
||||
val encoding : t Data_encoding.t
|
||||
val empty : t
|
||||
val leaf : elt -> t
|
||||
val node : t -> t -> t
|
||||
end) : sig
|
||||
val compute : H.elt list -> H.t
|
||||
type path =
|
||||
| Left of path * H.t
|
||||
| Right of H.t * path
|
||||
| Op
|
||||
val compute_path: H.elt list -> int -> path
|
||||
val check_path: path -> H.elt -> H.t * int
|
||||
end
|
||||
|
@ -1,5 +1,6 @@
|
||||
|
||||
TESTS := \
|
||||
merkle \
|
||||
data-encoding \
|
||||
store context state \
|
||||
basic basic.sh \
|
||||
@ -246,6 +247,29 @@ test-lwt-pipe: ${NODELIB} ${TEST_PIPE_IMPLS:.ml=.cmx}
|
||||
clean::
|
||||
rm -f test-p2p
|
||||
|
||||
############################################################################
|
||||
## Merkle test program
|
||||
|
||||
.PHONY:build-test-merkle run-test-merkle
|
||||
build-test-merkle: test-merkle
|
||||
run-test-merkle:
|
||||
./test-merkle
|
||||
|
||||
TEST_MERKLE_INTFS =
|
||||
|
||||
TEST_MERKLE_IMPLS = \
|
||||
lib/assert.ml \
|
||||
lib/test.ml \
|
||||
test_merkle.ml
|
||||
|
||||
${TEST_MERKLE_IMPLS:.ml=.cmx}: ${NODELIB}
|
||||
test-merkle: ${NODELIB} ${TEST_MERKLE_IMPLS:.ml=.cmx}
|
||||
ocamlfind ocamlopt -linkall -linkpkg ${OCAMLFLAGS} -o $@ $^
|
||||
|
||||
clean::
|
||||
rm -f test-merkle
|
||||
|
||||
|
||||
############################################################################
|
||||
## data encoding test program
|
||||
|
||||
|
@ -50,9 +50,7 @@ let equal_string_option ?msg o1 o2 =
|
||||
|
||||
let equal_error_monad ?msg exn1 exn2 =
|
||||
let msg = format_msg msg in
|
||||
let prn exn = match exn with
|
||||
| Error_monad.Exn err -> Printexc.to_string err
|
||||
| Error_monad.Unclassified err -> err in
|
||||
let prn err = Format.asprintf "%a" Error_monad.pp_print_error [err] in
|
||||
Assert.equal ?msg ~prn exn1 exn2
|
||||
|
||||
let equal_block_set ?msg set1 set2 =
|
||||
|
86
test/test_merkle.ml
Normal file
@ -0,0 +1,86 @@
|
||||
(**************************************************************************)
|
||||
(* *)
|
||||
(* Copyright (c) 2014 - 2016. *)
|
||||
(* Dynamic Ledger Solutions, Inc. <contact@tezos.com> *)
|
||||
(* *)
|
||||
(* All rights reserved. No warranty, explicit or implicit, provided. *)
|
||||
(* *)
|
||||
(**************************************************************************)
|
||||
|
||||
open Error_monad
|
||||
open Hash
|
||||
|
||||
let rec (--) i j =
|
||||
if j < i then []
|
||||
else i :: (i+1) -- j
|
||||
|
||||
type tree =
|
||||
| Empty
|
||||
| Leaf of int
|
||||
| Node of tree * tree
|
||||
|
||||
let rec list_of_tree = function
|
||||
| Empty -> [], 0
|
||||
| Leaf x -> [x], 1
|
||||
| Node (x, y) ->
|
||||
let x, sx = list_of_tree x
|
||||
and y, sy = list_of_tree y in
|
||||
assert (sx = sy) ;
|
||||
x @ y, sx + sy
|
||||
|
||||
module Merkle = Hash.Generic_Merkle_tree(struct
|
||||
type t = tree
|
||||
type elt = int
|
||||
let empty = Empty
|
||||
let leaf i = Leaf i
|
||||
let node x y = Node (x, y)
|
||||
let encoding =
|
||||
(* Fake... *)
|
||||
Data_encoding.conv (fun _ -> 0) (fun _ -> Empty) Data_encoding.int31
|
||||
end)
|
||||
|
||||
let rec compare_list xs ys =
|
||||
match xs, ys with
|
||||
| [], [] -> true
|
||||
| [x], y :: ys when x = y -> ys = [] || compare_list xs ys
|
||||
| x :: xs, y :: ys when x = y -> compare_list xs ys
|
||||
| _, _ -> false
|
||||
|
||||
let check_size i =
|
||||
let l = 0 -- i in
|
||||
let l2, _ = list_of_tree (Merkle.compute l) in
|
||||
if compare_list l l2 then
|
||||
return ()
|
||||
else
|
||||
failwith "Failed for %d: %a"
|
||||
i
|
||||
(Format.pp_print_list
|
||||
~pp_sep:(fun ppf () -> Format.pp_print_string ppf ";")
|
||||
Format.pp_print_int)
|
||||
l2
|
||||
|
||||
let test_compute _ =
|
||||
iter_s check_size (0--99)
|
||||
|
||||
let check_path i =
|
||||
let l = 0 -- i in
|
||||
let orig = Merkle.compute l in
|
||||
iter_s (fun j ->
|
||||
let path = Merkle.compute_path l j in
|
||||
let found, pos = Merkle.check_path path j in
|
||||
if found = orig && j = pos then
|
||||
return ()
|
||||
else
|
||||
failwith "Failed for %d in %d." j i)
|
||||
l
|
||||
|
||||
let test_path _ =
|
||||
iter_s check_path (0--128)
|
||||
|
||||
let tests : (string * (string -> unit tzresult Lwt.t)) list = [
|
||||
"compute", test_compute ;
|
||||
"path", test_path ;
|
||||
]
|
||||
|
||||
let () =
|
||||
Test.run "merkel." tests
|
@ -62,6 +62,9 @@ let operation op =
|
||||
Data_encoding.Binary.to_bytes Store.Operation.encoding op
|
||||
|
||||
let block state ?(operations = []) pred_hash pred name : Store.Block_header.t =
|
||||
let operations =
|
||||
Operation_list_list_hash.compute
|
||||
[Operation_list_hash.compute operations] in
|
||||
let fitness = incr_fitness pred.Store.Block_header.shell.fitness in
|
||||
let timestamp = incr_timestamp pred.shell.timestamp in
|
||||
{ shell = {
|
||||
@ -76,7 +79,7 @@ let build_chain state tbl otbl pred names =
|
||||
(fun (pred_hash, pred) name ->
|
||||
begin
|
||||
let oph, op, bytes = operation name in
|
||||
State.Operation.store state op >>= fun created ->
|
||||
State.Operation.store state oph op >>= fun created ->
|
||||
Assert.is_true ~msg:__LOC__ created ;
|
||||
State.Operation.read_opt state oph >>= fun op' ->
|
||||
Assert.equal_operation ~msg:__LOC__ (Some op) op' ;
|
||||
@ -84,9 +87,9 @@ let build_chain state tbl otbl pred names =
|
||||
Assert.is_true ~msg:__LOC__ store_invalid ;
|
||||
Hashtbl.add otbl name (oph, Error []) ;
|
||||
let block = block ~operations:[oph] state pred_hash pred name in
|
||||
State.Block_header.store state block >>= fun created ->
|
||||
Assert.is_true ~msg:__LOC__ created ;
|
||||
let hash = Store.Block_header.hash block in
|
||||
State.Block_header.store state hash block >>= fun created ->
|
||||
Assert.is_true ~msg:__LOC__ created ;
|
||||
State.Block_header.read_opt state hash >>= fun block' ->
|
||||
Assert.equal_block ~msg:__LOC__ (Some block) block' ;
|
||||
State.Block_header.mark_invalid state hash [] >>= fun store_invalid ->
|
||||
@ -104,6 +107,9 @@ let build_chain state tbl otbl pred names =
|
||||
|
||||
let block state ?(operations = []) (pred: State.Valid_block.t) name
|
||||
: State.Block_header.t =
|
||||
let operations =
|
||||
Operation_list_list_hash.compute
|
||||
[Operation_list_hash.compute operations] in
|
||||
let fitness = incr_fitness pred.fitness in
|
||||
let timestamp = incr_timestamp pred.timestamp in
|
||||
{ shell = { net_id = pred.net_id ;
|
||||
@ -117,15 +123,16 @@ let build_valid_chain state tbl vtbl otbl pred names =
|
||||
(fun pred name ->
|
||||
begin
|
||||
let oph, op, bytes = operation name in
|
||||
State.Operation.store state op >>= fun created ->
|
||||
State.Operation.store state oph op >>= fun created ->
|
||||
Assert.is_true ~msg:__LOC__ created ;
|
||||
State.Operation.read_opt state oph >>= fun op' ->
|
||||
Assert.equal_operation ~msg:__LOC__ (Some op) op' ;
|
||||
Hashtbl.add otbl name (oph, Ok op) ;
|
||||
let block = block state ~operations:[oph] pred name in
|
||||
State.Block_header.store state block >>= fun created ->
|
||||
Assert.is_true ~msg:__LOC__ created ;
|
||||
let hash = Store.Block_header.hash block in
|
||||
State.Block_header.store state hash block >>= fun created ->
|
||||
Assert.is_true ~msg:__LOC__ created ;
|
||||
State.Operation_list.store_all state hash [[oph]] >>= fun () ->
|
||||
State.Block_header.read_opt state hash >>= fun block' ->
|
||||
Assert.equal_block ~msg:__LOC__ (Some block) block' ;
|
||||
Hashtbl.add tbl name (hash, block) ;
|
||||
@ -162,7 +169,7 @@ let build_example_tree net =
|
||||
build_chain net tbl otbl b7 chain >>= fun () ->
|
||||
let pending_op = "PP" in
|
||||
let oph, op, bytes = operation pending_op in
|
||||
State.Operation.store net op >>= fun _ ->
|
||||
State.Operation.store net oph op >>= fun _ ->
|
||||
State.Operation.read_opt net oph >>= fun op' ->
|
||||
Assert.equal_operation ~msg:__LOC__ (Some op) op' ;
|
||||
Hashtbl.add otbl pending_op (oph, Ok op) ;
|
||||
|
@ -89,6 +89,9 @@ let test_operation s =
|
||||
(** Block store *)
|
||||
|
||||
let lolblock ?(operations = []) header =
|
||||
let operations =
|
||||
Operation_list_list_hash.compute
|
||||
[Operation_list_hash.compute operations] in
|
||||
{ Store.Block_header.shell =
|
||||
{ timestamp = Time.of_seconds (Random.int64 1500L) ;
|
||||
net_id ;
|
||||
|