(**************************************************************************)
(*                                                                        *)
(*    Copyright (c) 2014 - 2018.                                          *)
(*    Dynamic Ledger Solutions, Inc. <contact@tezos.com>                  *)
(*                                                                        *)
(*    All rights reserved. No warranty, explicit or implicit, provided.   *)
(*                                                                        *)
(**************************************************************************)

open Logging.Node.State

type error +=
  | Unknown_chain of Chain_id.t

type error += Bad_data_dir

type error += Block_not_invalid of Block_hash.t

let () =
  let open Error_monad in
  register_error_kind
    `Temporary
    ~id:"state.unknown_chain"
    ~title:"Unknown chain"
    ~description:"The chain identifier could not be found \
                  in the chain identifiers table."
    ~pp:(fun ppf id ->
        Format.fprintf ppf "Unknown chain %a" Chain_id.pp id)
    Data_encoding.(obj1 (req "chain" Chain_id.encoding))
    (function Unknown_chain x -> Some x | _ -> None)
    (fun x -> Unknown_chain x) ;
  register_error_kind
    `Permanent
    ~id:"badDataDir"
    ~title:"Bad data directory"
    ~description:"The data directory could not be read. \
                  This could be because it was generated with an \
                  old version of the tezos-node program. \
                  Deleting and regenerating this directory \
                  may fix the problem."
    Data_encoding.empty
    (function Bad_data_dir -> Some () | _ -> None)
    (fun () -> Bad_data_dir) ;
  register_error_kind
    `Permanent
    ~id:"blockNotInvalid"
    ~title:"Block not invalid"
    ~description:"The invalid block to be unmarked was not actually invalid."
    ~pp:(fun ppf block ->
        Format.fprintf ppf "Block %a was expected to be invalid, but was not actually invalid."
          Block_hash.pp block)
    Data_encoding.(obj1 (req "block" Block_hash.encoding))
    (function Block_not_invalid block -> Some block | _ -> None)
    (fun block -> Block_not_invalid block)

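(** A value protected by a mutex: all accesses to the wrapped [data] go
    through [use], which runs the given callback while holding the lock,
    serializing concurrent accesses. *)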
module Shared = struct
  type 'a t = {
    data: 'a ;
    lock: Lwt_mutex.t ;
  }
  let create data = { data ; lock = Lwt_mutex.create () }
  let use { data ; lock } f =
    Lwt_mutex.with_lock lock (fun () -> f data)
end

type global_state = {
  global_data: global_data Shared.t ;
  protocol_store: Store.Protocol.store Shared.t ;
}

and global_data = {
  chains: chain_state Chain_id.Table.t ;
  global_store: Store.t ;
  context_index: Context.index ;
}

and chain_state = {
  global_state: global_state ;
  chain_id: Chain_id.t ;
  genesis: genesis ;
  faked_genesis_hash: Block_hash.t ;
  expiration: Time.t option ;
  allow_forked_chain: bool ;
  block_store: Store.Block.store Shared.t ;
  context_index: Context.index Shared.t ;
  block_watcher: block Lwt_watcher.input ;
  chain_data: chain_data_state Shared.t ;
}

and genesis = {
  time: Time.t ;
  block: Block_hash.t ;
  protocol: Protocol_hash.t ;
}

and chain_data_state = {
  mutable data: chain_data ;
  chain_data_store: Store.Chain_data.store ;
}

and chain_data = {
  current_head: block ;
  current_mempool: Mempool.t ;
  live_blocks: Block_hash.Set.t ;
  live_operations: Operation_hash.Set.t ;
  locator: Block_locator.t Lwt.t lazy_t ;
}

and block = {
  chain_state: chain_state ;
  hash: Block_hash.t ;
  contents: Store.Block.contents ;
}

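(* [read_chain_data chain f] runs [f] on the chain's mutable data under the
   lock.  [update_chain_data chain f] does the same, but [f] additionally
   returns an optional replacement for the data: when it is [Some], the new
   data is installed and the context head is moved to the new current head's
   context. *)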
let read_chain_data { chain_data } f =
  Shared.use chain_data begin fun state ->
    f state.chain_data_store state.data
  end

let update_chain_data { chain_id ; context_index ; chain_data } f =
  Shared.use chain_data begin fun state ->
    f state.chain_data_store state.data >>= fun (data, res) ->
    Lwt_utils.may data
      ~f:begin fun data ->
        state.data <- data ;
        Shared.use context_index begin fun context_index ->
          Context.set_head context_index chain_id
            data.current_head.contents.context
        end >>= fun () ->
        Lwt.return_unit
      end >>= fun () ->
    Lwt.return res
  end

(** The number of predecessors stored per block.
    This value is chosen so that block locators covering a chain of
    two months at one block per minute (~86K blocks) can be computed
    efficiently, at a space cost of ~72MB.
    |locator| = log2(|chain|/10) - 1
*)
let stored_predecessors_size = 12

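(* With [stored_predecessors_size = 12], each block keeps the hashes of its
   predecessors at distances 2^0, 2^1, ..., 2^11, i.e. from 1 up to 2048
   blocks back.  For the figures quoted above, log2(86_400 / 10) is roughly
   13.1, so the formula gives a locator size of about 12. *)
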
(**
   Takes a block and populates its predecessors store, under the
   assumption that all its predecessors already have their own store
   populated. The predecessors are distributed along the chain, up
   to the genesis, at a distance from [b] that grows exponentially.
   The store tabulates a function [p] from distances to block_ids such
   that if [p(b,d)=b'] then [b'] is at distance 2^d from [b].
   Example of how previously stored predecessors are used:
   p(n,0) = n-1
   p(n,1) = n-2 = p(n-1,0)
   p(n,2) = n-4 = p(n-2,1)
   p(n,3) = n-8 = p(n-4,2)
   p(n,4) = n-16 = p(n-8,3)
*)
let store_predecessors (store: Store.Block.store) (b: Block_hash.t) : unit Lwt.t =
  let rec loop pred dist =
    if dist = stored_predecessors_size
    then Lwt.return_unit
    else
      Store.Block.Predecessors.read_opt (store, pred) (dist-1) >>= function
      | None -> Lwt.return_unit (* we reached genesis *)
      | Some p ->
          Store.Block.Predecessors.store (store, b) dist p >>= fun () ->
          loop p (dist+1)
  in
  (* the first predecessor is fetched from the header *)
  Store.Block.Contents.read_exn (store, b) >>= fun contents ->
  let pred = contents.header.shell.predecessor in
  if Block_hash.equal b pred then
    Lwt.return_unit (* genesis *)
  else
    Store.Block.Predecessors.store (store,b) 0 pred >>= fun () ->
    loop pred 1

(**
   [predecessor_n store b d] returns the hash of the block at distance [d]
   from [b]. Returns [None] if [d] is greater than the distance of [b]
   from genesis, or if [b] is genesis.
   Works in O(log|chain|) if the chain is shorter than
   2^[stored_predecessors_size] and in O(|chain|) after that.
   @raise Invalid_argument if [distance] is not strictly positive.
*)
let predecessor_n (store: Store.Block.store) (b: Block_hash.t) (distance: int)
  : Block_hash.t option Lwt.t =
  (* helper functions *)
  (* computes 2^n without floats *)
  let power_of_2 n =
    if n < 0 then invalid_arg "negative argument" else
      let rec loop cnt res =
        if cnt<1 then res
        else loop (cnt-1) (res*2)
      in
      loop n 1
  in
  (* computes the largest power of two not exceeding a given number,
     and the remainder, without floats *)
  let closest_power_two_and_rest n =
    if n < 0 then invalid_arg "negative argument" else
      let rec loop cnt n rest =
        if n<=1
        then (cnt,rest)
        else loop (cnt+1) (n/2) (rest + (power_of_2 cnt) * (n mod 2))
      in
      loop 0 n 0
  in

  (* actual predecessor function *)
  if distance <= 0 then
    invalid_arg ("State.predecessor: distance <= 0: " ^ string_of_int distance)
  else
    let rec loop b distance =
      if distance = 1
      then Store.Block.Predecessors.read_opt (store, b) 0
      else
        let (power,rest) = closest_power_two_and_rest distance in
        let (power,rest) =
          if power < stored_predecessors_size then (power,rest)
          else
            let power = stored_predecessors_size-1 in
            let rest = distance - (power_of_2 power) in
            (power,rest)
        in
        Store.Block.Predecessors.read_opt (store, b) power >>= function
        | None -> Lwt.return_none (* reached genesis *)
        | Some pred ->
            if rest = 0
            then Lwt.return_some pred (* landed on the requested predecessor *)
            else loop pred rest (* need to jump further back *)
    in
    loop b distance

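(* Illustration of the jump decomposition above: asking for the predecessor
   at distance 21 first splits 21 = 2^4 + 5, so a single lookup at index 4
   jumps 16 blocks back; the remaining distance 5 = 2^2 + 1 is covered by a
   jump of 4 (index 2) and a final jump of 1 (index 0): three reads instead
   of twenty-one. *)
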
let compute_locator_from_hash (chain : chain_state) ?(size = 200) head_hash =
  Shared.use chain.block_store begin fun block_store ->
    Store.Block.Contents.read_exn (block_store, head_hash) >>= fun { header } ->
    Block_locator.compute ~predecessor:(predecessor_n block_store)
      ~genesis:chain.genesis.block head_hash header size
  end

let compute_locator chain ?size head =
  compute_locator_from_hash chain ?size head.hash

type t = global_state

module Locked_block = struct

  let store_genesis store genesis context =
    let shell : Block_header.shell_header = {
      level = 0l ;
      proto_level = 0 ;
      predecessor = genesis.block ; (* genesis' predecessor is genesis *)
      timestamp = genesis.time ;
      fitness = [] ;
      validation_passes = 0 ;
      operations_hash = Operation_list_list_hash.empty ;
      context ;
    } in
    let header : Block_header.t = { shell ; protocol_data = MBytes.create 0 } in
    Store.Block.Contents.store (store, genesis.block)
      { Store.Block.header ; message = Some "Genesis" ;
        max_operations_ttl = 0 ; context ;
        max_operation_data_length = 0 ;
      } >>= fun () ->
    Lwt.return header

end

module Chain = struct

  type nonrec genesis = genesis = {
    time: Time.t ;
    block: Block_hash.t ;
    protocol: Protocol_hash.t ;
  }
  let genesis_encoding =
    let open Data_encoding in
    conv
      (fun { time ; block ; protocol } -> (time, block, protocol))
      (fun (time, block, protocol) -> { time ; block ; protocol })
      (obj3
         (req "timestamp" Time.encoding)
         (req "block" Block_hash.encoding)
         (req "protocol" Protocol_hash.encoding))

  type t = chain_state
  type chain_state = t

  let allocate
      ~genesis ~faked_genesis_hash ~expiration ~allow_forked_chain
      ~current_head
      global_state context_index chain_data_store block_store =
    Store.Block.Contents.read_exn
      (block_store, current_head) >>= fun current_block ->
    let rec chain_data = {
      data = {
        current_head = {
          chain_state ;
          hash = current_head ;
          contents = current_block ;
        } ;
        current_mempool = Mempool.empty ;
        live_blocks = Block_hash.Set.singleton genesis.block ;
        live_operations = Operation_hash.Set.empty ;
        locator = lazy (compute_locator_from_hash chain_state current_head) ;
      } ;
      chain_data_store ;
    }
    and chain_state = {
      global_state ;
      chain_id = Chain_id.of_block_hash genesis.block ;
      chain_data = { Shared.data = chain_data ; lock = Lwt_mutex.create () } ;
      genesis ; faked_genesis_hash ;
      expiration ;
      allow_forked_chain ;
      block_store = Shared.create block_store ;
      context_index = Shared.create context_index ;
      block_watcher = Lwt_watcher.create_input () ;
    } in
    Lwt.return chain_state

  let locked_create
      global_state data ?expiration ?(allow_forked_chain = false)
      chain_id genesis commit =
    let chain_store = Store.Chain.get data.global_store chain_id in
    let block_store = Store.Block.get chain_store
    and chain_data_store = Store.Chain_data.get chain_store in
    Store.Chain.Genesis_hash.store chain_store genesis.block >>= fun () ->
    Store.Chain.Genesis_time.store chain_store genesis.time >>= fun () ->
    Store.Chain.Genesis_protocol.store chain_store genesis.protocol >>= fun () ->
    Store.Chain_data.Current_head.store chain_data_store genesis.block >>= fun () ->
    Store.Chain_data.Known_heads.store chain_data_store genesis.block >>= fun () ->
    begin
      match expiration with
      | None -> Lwt.return_unit
      | Some time -> Store.Chain.Expiration.store chain_store time
    end >>= fun () ->
    begin
      if allow_forked_chain then
        Store.Chain.Allow_forked_chain.store data.global_store chain_id
      else
        Lwt.return_unit
    end >>= fun () ->
    Locked_block.store_genesis
      block_store genesis commit >>= fun genesis_header ->
    allocate
      ~genesis
      ~faked_genesis_hash:(Block_header.hash genesis_header)
      ~current_head:genesis.block
      ~expiration
      ~allow_forked_chain
      global_state
      data.context_index
      chain_data_store
      block_store

  let create state ?allow_forked_chain genesis =
    let chain_id = Chain_id.of_block_hash genesis.block in
    Shared.use state.global_data begin fun data ->
      if Chain_id.Table.mem data.chains chain_id then
        Pervasives.failwith "State.Chain.create"
      else
        Context.commit_genesis
          data.context_index
          ~chain_id
          ~time:genesis.time
          ~protocol:genesis.protocol >>= fun commit ->
        locked_create
          state data ?allow_forked_chain chain_id genesis commit >>= fun chain ->
        Chain_id.Table.add data.chains chain_id chain ;
        Lwt.return chain
    end

  let locked_read global_state data id =
    let chain_store = Store.Chain.get data.global_store id in
    let block_store = Store.Block.get chain_store
    and chain_data_store = Store.Chain_data.get chain_store in
    Store.Chain.Genesis_hash.read chain_store >>=? fun genesis_hash ->
    Store.Chain.Genesis_time.read chain_store >>=? fun time ->
    Store.Chain.Genesis_protocol.read chain_store >>=? fun protocol ->
    Store.Chain.Expiration.read_opt chain_store >>= fun expiration ->
    Store.Chain.Allow_forked_chain.known
      data.global_store id >>= fun allow_forked_chain ->
    Store.Block.Contents.read (block_store, genesis_hash) >>=? fun genesis_header ->
    let genesis = { time ; protocol ; block = genesis_hash } in
    Store.Chain_data.Current_head.read chain_data_store >>=? fun current_head ->
    try
      allocate
        ~genesis
        ~faked_genesis_hash:(Block_header.hash genesis_header.header)
        ~current_head
        ~expiration
        ~allow_forked_chain
        global_state
        data.context_index
        chain_data_store
        block_store >>= return
    with Not_found ->
      fail Bad_data_dir

  let locked_read_all global_state data =
    Store.Chain.list data.global_store >>= fun ids ->
    iter_p
      (fun id ->
         locked_read global_state data id >>=? fun chain ->
         Chain_id.Table.add data.chains id chain ;
         return ())
      ids

  let read_all state =
    Shared.use state.global_data begin fun data ->
      locked_read_all state data
    end

  let get state id =
    Shared.use state.global_data begin fun data ->
      try return (Chain_id.Table.find data.chains id)
      with Not_found -> fail (Unknown_chain id)
    end

  let all state =
    Shared.use state.global_data begin fun { chains } ->
      Lwt.return @@
      Chain_id.Table.fold (fun _ chain acc -> chain :: acc) chains []
    end

  let id { chain_id } = chain_id
  let genesis { genesis } = genesis
  let faked_genesis_hash { faked_genesis_hash } = faked_genesis_hash
  let expiration { expiration } = expiration
  let allow_forked_chain { allow_forked_chain } = allow_forked_chain
  let global_state { global_state } = global_state

  let destroy state chain =
    lwt_debug "destroy %a" Chain_id.pp (id chain) >>= fun () ->
    Shared.use state.global_data begin fun { global_store ; chains } ->
      Chain_id.Table.remove chains (id chain) ;
      Store.Chain.destroy global_store (id chain) >>= fun () ->
      Lwt.return_unit
    end

end

module Block = struct

  type t = block = {
    chain_state: Chain.t ;
    hash: Block_hash.t ;
    contents: Store.Block.contents ;
  }
  type block = t

  let compare b1 b2 = Block_hash.compare b1.hash b2.hash
  let equal b1 b2 = Block_hash.equal b1.hash b2.hash

  let hash { hash } = hash
  let header { contents = { header } } = header
  let chain_state { chain_state } = chain_state
  let chain_id { chain_state = { chain_id } } = chain_id
  let shell_header { contents = { header = { shell } } } = shell
  let timestamp b = (shell_header b).timestamp
  let fitness b = (shell_header b).fitness
  let level b = (shell_header b).level
  let proto_level b = (shell_header b).proto_level
  let validation_passes b = (shell_header b).validation_passes
  let message { contents = { message } } = message
  let max_operations_ttl { contents = { max_operations_ttl } } =
    max_operations_ttl
  let max_operation_data_length { contents = { max_operation_data_length } } =
    max_operation_data_length

  let is_genesis b = Block_hash.equal b.hash b.chain_state.genesis.block

  let known_valid chain_state hash =
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Contents.known (store, hash)
    end
  let known_invalid chain_state hash =
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Invalid_block.known store hash
    end
  let read_invalid chain_state hash =
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Invalid_block.read_opt store hash
    end
  let list_invalid chain_state =
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Invalid_block.fold store ~init:[]
        ~f:(fun hash { level ; errors } acc ->
            Lwt.return ((hash, level, errors) :: acc))
    end
  let unmark_invalid chain_state block =
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Invalid_block.known store block >>= fun mem ->
      if mem
      then Store.Block.Invalid_block.remove store block >>= return
      else fail (Block_not_invalid block)
    end

  let known chain_state hash =
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Contents.known (store, hash) >>= fun known ->
      if known then
        Lwt.return_true
      else
        Store.Block.Invalid_block.known store hash
    end

  let read chain_state hash =
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Contents.read (store, hash) >>=? fun contents ->
      return { chain_state ; hash ; contents }
    end
  let read_opt chain_state hash =
    read chain_state hash >>= function
    | Error _ -> Lwt.return None
    | Ok v -> Lwt.return (Some v)
  let read_exn chain_state hash =
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Contents.read_exn (store, hash) >>= fun contents ->
      Lwt.return { chain_state ; hash ; contents }
    end

  (* Quick accessor to be optimized ?? *)
  let read_predecessor chain_state hash =
    read chain_state hash >>=? fun { contents = { header } } ->
    return header.shell.predecessor
  let read_predecessor_opt chain_state hash =
    read_predecessor chain_state hash >>= function
    | Error _ -> Lwt.return None
    | Ok v -> Lwt.return (Some v)
  let read_predecessor_exn chain_state hash =
    read_exn chain_state hash >>= fun { contents = { header } } ->
    Lwt.return header.shell.predecessor

  let predecessor { chain_state ; contents = { header } ; hash } =
    if Block_hash.equal hash header.shell.predecessor then
      Lwt.return_none (* we are at genesis *)
    else
      read_exn chain_state header.shell.predecessor >>= fun block ->
      Lwt.return (Some block)

  let predecessor_n (chain: Chain.t) (b: Block_hash.t) (distance: int) : Block_hash.t option Lwt.t =
    Shared.use chain.block_store (fun store ->
        predecessor_n store b distance)

  type error += Inconsistent_hash of Context_hash.t * Context_hash.t

  let () =
    Error_monad.register_error_kind
      `Permanent
      ~id:"inconsistentContextHash"
      ~title:"Inconsistent commit hash"
      ~description:
        "When committing the context of a block, the announced context \
         hash was not the one computed at commit time."
      ~pp: (fun ppf (got, exp) ->
          Format.fprintf ppf
            "@[<v 2>Inconsistent hash:@ got: %a@ expected: %a@]"
            Context_hash.pp got
            Context_hash.pp exp)
      Data_encoding.(obj2
                       (req "wrong_context_hash" Context_hash.encoding)
                       (req "expected_context_hash" Context_hash.encoding))
      (function Inconsistent_hash (got, exp) -> Some (got, exp) | _ -> None)
      (fun (got, exp) -> Inconsistent_hash (got, exp))

  let store
      ?(dont_enforce_context_hash = false)
      chain_state block_header operations
      { Tezos_protocol_environment_shell.context ; message ;
        max_operations_ttl ; max_operation_data_length } =
    let bytes = Block_header.to_bytes block_header in
    let hash = Block_header.hash_raw bytes in
    (* let the validator check the consistency of fitness, level, ... *)
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Invalid_block.known store hash >>= fun known_invalid ->
      fail_when known_invalid (failure "Known invalid") >>=? fun () ->
      Store.Block.Contents.known (store, hash) >>= fun known ->
      if known then
        return None
      else begin
        Context.commit
          ~time:block_header.shell.timestamp ?message context >>= fun commit ->
        fail_unless
          (dont_enforce_context_hash
           || Context_hash.equal block_header.shell.context commit)
          (Inconsistent_hash (commit, block_header.shell.context)) >>=? fun () ->
        let contents = {
          Store.Block.header =
            if dont_enforce_context_hash then
              { block_header
                with shell = { block_header.shell with context = commit } }
            else
              block_header ;
          message ;
          max_operations_ttl ;
          max_operation_data_length ;
          context = commit ;
        } in
        Store.Block.Contents.store (store, hash) contents >>= fun () ->
        let hashes = List.map (List.map Operation.hash) operations in
        let list_hashes = List.map Operation_list_hash.compute hashes in
        Lwt_list.iteri_p
          (fun i hashes ->
             let path = Operation_list_list_hash.compute_path list_hashes i in
             Store.Block.Operation_hashes.store
               (store, hash) i hashes >>= fun () ->
             Store.Block.Operation_path.store (store, hash) i path)
          hashes >>= fun () ->
        Lwt_list.iteri_p
          (fun i ops -> Store.Block.Operations.store (store, hash) i ops)
          operations >>= fun () ->
        (* Store predecessors *)
        store_predecessors store hash >>= fun () ->
        (* Update the chain state. *)
        Shared.use chain_state.chain_data begin fun chain_data ->
          let store = chain_data.chain_data_store in
          let predecessor = block_header.shell.predecessor in
          Store.Chain_data.Known_heads.remove store predecessor >>= fun () ->
          Store.Chain_data.Known_heads.store store hash
        end >>= fun () ->
        let block = { chain_state ; hash ; contents } in
        Lwt_watcher.notify chain_state.block_watcher block ;
        return (Some block)
      end
    end

  let store_invalid chain_state block_header errors =
    let bytes = Block_header.to_bytes block_header in
    let hash = Block_header.hash_raw bytes in
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Contents.known (store, hash) >>= fun known_valid ->
      fail_when known_valid (failure "Known valid") >>=? fun () ->
      Store.Block.Invalid_block.known store hash >>= fun known_invalid ->
      if known_invalid then
        return false
      else
        Store.Block.Invalid_block.store store hash
          { level = block_header.shell.level ; errors } >>= fun () ->
        return true
    end

  let watcher chain_state =
    Lwt_watcher.create_stream chain_state.block_watcher

  let operation_hashes { chain_state ; hash ; contents } i =
    if i < 0 || contents.header.shell.validation_passes <= i then
      invalid_arg "State.Block.operation_hashes" ;
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Operation_hashes.read_exn (store, hash) i >>= fun hashes ->
      Store.Block.Operation_path.read_exn (store, hash) i >>= fun path ->
      Lwt.return (hashes, path)
    end

  let all_operation_hashes { chain_state ; hash ; contents } =
    Shared.use chain_state.block_store begin fun store ->
      Lwt_list.map_p
        (Store.Block.Operation_hashes.read_exn (store, hash))
        (0 -- (contents.header.shell.validation_passes - 1))
    end

  let operations { chain_state ; hash ; contents } i =
    if i < 0 || contents.header.shell.validation_passes <= i then
      invalid_arg "State.Block.operations" ;
    Shared.use chain_state.block_store begin fun store ->
      Store.Block.Operation_path.read_exn (store, hash) i >>= fun path ->
      Store.Block.Operations.read_exn (store, hash) i >>= fun ops ->
      Lwt.return (ops, path)
    end

  let all_operations { chain_state ; hash ; contents } =
    Shared.use chain_state.block_store begin fun store ->
      Lwt_list.map_p
        (fun i -> Store.Block.Operations.read_exn (store, hash) i)
        (0 -- (contents.header.shell.validation_passes - 1))
    end

  let context { chain_state ; hash } =
    Shared.use chain_state.block_store begin fun block_store ->
      Store.Block.Contents.read_exn (block_store, hash)
    end >>= fun { context = commit } ->
    Shared.use chain_state.context_index begin fun context_index ->
      Context.checkout_exn context_index commit
    end

  let protocol_hash block =
    context block >>= fun context ->
    Context.get_protocol context

  let test_chain block =
    context block >>= fun context ->
    Context.get_test_chain context

end

let read_block { global_data } hash =
  Shared.use global_data begin fun { chains } ->
    Chain_id.Table.fold
      (fun _chain_id chain_state acc ->
         acc >>= function
         | Some _ -> acc
         | None ->
             Block.read_opt chain_state hash >>= function
             | None -> acc
             | Some block -> Lwt.return (Some block))
      chains
      Lwt.return_none
  end

let read_block_exn t hash =
  read_block t hash >>= function
  | None -> Lwt.fail Not_found
  | Some b -> Lwt.return b

let fork_testchain block protocol expiration =
  Shared.use block.chain_state.global_state.global_data begin fun data ->
    Block.context block >>= fun context ->
    Context.set_test_chain context Not_running >>= fun context ->
    Context.set_protocol context protocol >>= fun context ->
    Context.commit_test_chain_genesis
      data.context_index block.hash block.contents.header.shell.timestamp
      context >>=? fun (chain_id, genesis, commit) ->
    let genesis = {
      block = genesis ;
      time = Time.add block.contents.header.shell.timestamp 1L ;
      protocol ;
    } in
    Chain.locked_create block.chain_state.global_state data
      chain_id ~expiration genesis commit >>= fun chain ->
    return chain
  end

module Protocol = struct

  include Protocol

  let known global_state hash =
    Shared.use global_state.protocol_store begin fun store ->
      Store.Protocol.Contents.known store hash
    end

  let read global_state hash =
    Shared.use global_state.protocol_store begin fun store ->
      Store.Protocol.Contents.read store hash
    end
  let read_opt global_state hash =
    Shared.use global_state.protocol_store begin fun store ->
      Store.Protocol.Contents.read_opt store hash
    end
  let read_exn global_state hash =
    Shared.use global_state.protocol_store begin fun store ->
      Store.Protocol.Contents.read_exn store hash
    end

  let read_raw global_state hash =
    Shared.use global_state.protocol_store begin fun store ->
      Store.Protocol.RawContents.read (store, hash)
    end
  let read_raw_opt global_state hash =
    Shared.use global_state.protocol_store begin fun store ->
      Store.Protocol.RawContents.read_opt (store, hash)
    end
  let read_raw_exn global_state hash =
    Shared.use global_state.protocol_store begin fun store ->
      Store.Protocol.RawContents.read_exn (store, hash)
    end

  let store global_state p =
    let bytes = Protocol.to_bytes p in
    let hash = Protocol.hash_raw bytes in
    Shared.use global_state.protocol_store begin fun store ->
      Store.Protocol.Contents.known store hash >>= fun known ->
      if known then
        Lwt.return None
      else
        Store.Protocol.RawContents.store (store, hash) bytes >>= fun () ->
        Lwt.return (Some hash)
    end

  let remove global_state hash =
    Shared.use global_state.protocol_store begin fun store ->
      Store.Protocol.Contents.known store hash >>= fun known ->
      if not known then
        Lwt.return_false
      else
        Store.Protocol.Contents.remove store hash >>= fun () ->
        Lwt.return_true
    end

  let list global_state =
    Shared.use global_state.protocol_store begin fun store ->
      Store.Protocol.Contents.fold_keys store
        ~init:Protocol_hash.Set.empty
        ~f:(fun x acc -> Lwt.return (Protocol_hash.Set.add x acc))
    end

end

module Current_mempool = struct

  let set chain_state ~head mempool =
    update_chain_data chain_state begin fun _chain_data_store data ->
      if Block_hash.equal head (Block.hash data.current_head) then
        Lwt.return (Some { data with current_mempool = mempool },
                    ())
      else
        Lwt.return (None, ())
    end

  let get chain_state =
    read_chain_data chain_state begin fun _chain_data_store data ->
      Lwt.return (Block.header data.current_head, data.current_mempool)
    end

end

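(** [read ?patch_context ~store_root ~context_root ()] opens the on-disk
    store and context, loads every known chain into memory, and returns the
    resulting global state; [close] shuts the underlying store down.

    A minimal usage sketch (the [store_root], [context_root] and [genesis]
    values are assumed to be supplied by the caller):
    {[
      State.read ~store_root ~context_root () >>=? fun state ->
      State.Chain.create state genesis >>= fun chain ->
      ...
    ]}
*)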
let read
    ?patch_context
    ~store_root
    ~context_root
    () =
  Store.init store_root >>=? fun global_store ->
  Context.init ?patch_context ~root:context_root >>= fun context_index ->
  let global_data = {
    chains = Chain_id.Table.create 17 ;
    global_store ;
    context_index ;
  } in
  let state = {
    global_data = Shared.create global_data ;
    protocol_store = Shared.create @@ Store.Protocol.get global_store ;
  } in
  Chain.read_all state >>=? fun () ->
  return state

let close { global_data } =
  Shared.use global_data begin fun { global_store } ->
    Store.close global_store ;
    Lwt.return_unit
  end