Merge branch 'improved_rpc_context' into 'master'
Improved rpc context

See merge request !179

commit c1b4a74bf7
@ -173,6 +173,7 @@ build:docker:alphanet:
only:
- alphanet@tezos/tezos
script:
- sed -i s/TEZOS/TEZOS_ALPHANET/ src/node/shell/distributed_db_message.ml
- patch -p1 < scripts/alphanet_constants.patch
- ./scripts/create_docker_image.sh
"${CI_REGISTRY_IMAGE}" "${CI_BUILD_REF}.patched" .
@ -185,6 +186,7 @@ build:docker:alphanet_next:
only:
- master@tezos/tezos
script:
- sed -i s/TEZOS/TEZOS_ALPHANET_NEXT/ src/node/shell/distributed_db_message.ml
- patch -p1 < scripts/alphanet_constants.patch
- patch -p1 < scripts/alphanet_next.patch
- ./scripts/create_docker_image.sh
@ -296,9 +298,7 @@ deploy:alphanet_next:
- echo "${CI_KH}" > ~/.ssh/known_hosts
- echo "${CI_PK_ALPHANET_NEXT}" > ~/.ssh/id_ed25519
- chmod 400 ~/.ssh/id_ed25519
- ssh greg@zo.gbzm.fr
- ssh tezos@35.167.138.212
- ssh tezos@35.165.227.4
- echo | ssh -v greg@zo.gbzm.fr
allow_failure: true

cleanup:
@ -2,11 +2,11 @@ FROM alpine:$alpine_version

LABEL distro_style="apk" distro="alpine" distro_long="alpine-$alpine_version" arch="x86_64" operatingsystem="linux"

RUN apk update && \
RUN adduser -S tezos && \
apk update && \
apk upgrade && \
apk add sudo bash libssl1.0 libsodium libev gmp git && \
apk add sudo bash libssl1.0 libsodium libev gmp git nginx && \
rm -f /var/cache/apk/* && \
adduser -S tezos && \
echo 'tezos ALL=(ALL:ALL) NOPASSWD:ALL' > /etc/sudoers.d/tezos && \
chmod 440 /etc/sudoers.d/tezos && \
chown root:root /etc/sudoers.d/tezos && \
@ -30,6 +30,9 @@ RUN sudo cp scripts/docker_entrypoint.sh /usr/local/bin/tezos && \
RUN sudo mkdir -p /var/run/tezos && \
sudo chown tezos /var/run/tezos

RUN sudo mkdir -p /run/nginx && \
sudo cp scripts/nginx.conf /etc/nginx

ENV EDITOR=vi

VOLUME /var/run/tezos
@ -128,7 +128,7 @@ check_volume() {

clear_volume() {
if check_volume ; then
docker volume rm "$docker_volume"
docker volume rm "$docker_volume" > /dev/null
echo "\033[32mThe blockchain data has been removed from the disk.\033[0m"
else
echo "\033[32mNo remaining data to be removed from the disk.\033[0m"
@ -170,11 +170,13 @@ start_container() {
fi
docker rm "$docker_container" || true > /dev/null 2>&1
echo "Launching the docker container..."
docker run -dit -p "$port:$port" \
docker run --rm -dit -p "$port:$port" -p "8732:80" \
-v $docker_volume:/var/run/tezos \
--entrypoint /bin/sh \
--name "$docker_container" \
"$docker_image" > /dev/null
docker exec --user root --detach "$docker_container" \
nginx -c /etc/nginx/nginx.conf
may_restore_identity
may_restore_accounts
fi
@ -188,7 +190,7 @@ stop_container() {
save_identity ## Saving again, just in case...
save_accounts
printf "Stopping the container... "
docker stop "$docker_container"
docker stop "$docker_container" >/dev/null
echo " done"
}

@ -400,10 +402,16 @@ assert_uptodate() {

update_script() {
pull_image
tmp="$(docker run -dit --entrypoint /bin/true "$docker_image")"
docker cp "$tmp:home/tezos/scripts/alphanet.sh" "$0"
docker stop "$tmp"
tmp="$(docker run --rm -dit --entrypoint /bin/true "$docker_image")"
docker cp "$tmp:home/tezos/scripts/alphanet.sh" ".alphanet.sh.new"
docker stop "$tmp" > /dev/null
if ! diff .alphanet.sh.new "$0" >/dev/null 2>&1 ; then
mv .alphanet.sh.new "$0"
echo "\033[32mThe script has been updated.\033[0m"
else
rm .alphanet.sh.new
echo "\033[32mThe script is up to date.\033[0m"
fi
}

usage() {
@ -470,6 +478,10 @@ case "$command" in
exec "$0" start "$@"
;;
clear)
if check_container; then
echo "\033[31mCannot clear data while the container is running.\033[0m"
exit 1
fi
clear_volume
;;
status)
@ -22,15 +22,3 @@ diff --git a/src/proto/alpha/constants_repr.ml b/src/proto/alpha/constants_repr.
max_signing_slot = 15 ;
instructions_per_transaction = 16 * 1024 ;
proof_of_work_threshold =
diff --git a/src/node/shell/distributed_db_message.ml b/src/node/shell/distributed_db_message.ml
--- a/src/node/shell/distributed_db_message.ml
+++ b/src/node/shell/distributed_db_message.ml
@@ -144,7 +144,7 @@ let encoding =

let versions =
let open P2p.Version in
- [ { name = "TEZOS" ;
+ [ { name = "TEZOS_ALPHANET" ;
major = 0 ;
minor = 5 ;
}

@ -1,15 +1,3 @@
diff --git a/src/node/shell/distributed_db_message.ml b/src/node/shell/distributed_db_message.ml
--- a/src/node/shell/distributed_db_message.ml
+++ b/src/node/shell/distributed_db_message.ml
@@ -144,7 +144,7 @@ let encoding =

let versions =
let open P2p.Version in
- [ { name = "TEZOS_ALPHANET" ;
+ [ { name = "TEZOS_ALPHANET_NEXT" ;
major = 0 ;
minor = 5 ;
}
diff --git a/scripts/alphanet.sh b/scripts/alphanet.sh
--- a/scripts/alphanet.sh
+++ b/scripts/alphanet.sh
@ -28,12 +28,12 @@ wait_for_the_node_to_be_bootstraped() {
may_create_identity() {
if ! $client get balance "my_identity" >/dev/null 2>&1 ; then
echo "Generating new manager key (known as 'my_identity')..."
$client gen keys my_identity
$client gen keys "my_identity"
fi
if ! $client get balance "my_account" >/dev/null 2>&1 ; then
echo "Creating new account for 'my_identity' (known as 'my_account')..."
$client forget contract my_account || true >/dev/null 2>&1
$client originate free account my_account for my_identity
$client forget contract "my_account" >/dev/null 2>&1 || true
$client originate free account "my_account" for "my_identity"
fi
}
@ -7,20 +7,24 @@ node="${node:=tezos-node}"
client="${client:=tezos-client -base-dir \"$client_dir\"}"

init() {
if [ -f "$data_dir/alphanet_version" ] && \
[ "$(cat $data_dir/alphanet_version)" \
if [ ! -f "$data_dir/alphanet_version" ] || \
[ "$(cat "$data_dir/alphanet_version")" \
!= "$(cat ~/scripts/alphanet_version)" ]; then
echo "\033[33mThe alphanet chain has been reset\033[0m"
mv "$node_dir/identity.json" /tmp
mv "$client_dir/public key hashs" /tmp
mv "$client_dir/public keys" /tmp
mv "$client_dir/secret keys" /tmp
echo -e "\033[33mThe alphanet chain has been reset\033[0m"
mkdir -p "$data_dir/bak"
mv "$node_dir/identity.json" \
"$client_dir/public key hashs" \
"$client_dir/public keys" \
"$client_dir/secret keys" \
"$data_dir/bak"
rm -rf "$node_dir" "$client_dir"
mkdir -p "$node_dir" "$client_dir"
mv "/tmp/identity.json" "$node_dir/"
mv "/tmp/public key hashs" "$client_dir/"
mv "/tmp/public keys" "$client_dir/"
mv "/tmp/secret keys" "$client_dir/"
mv "$data_dir/bak/identity.json" "$node_dir/"
mv "$data_dir/bak/public key hashs" "$client_dir/"
mv "$data_dir/bak/public keys" "$client_dir/"
mv "$data_dir/bak/secret keys" "$client_dir/"
rmdir "$data_dir/bak"
cp ~/scripts/alphanet_version "$data_dir/alphanet_version"
fi
if [ ! -f "$node_dir/config.json" ]; then
"$node" config init \
scripts/nginx.conf (new file, 31 lines)
@ -0,0 +1,31 @@
# /etc/nginx/nginx.conf

user nginx;

worker_processes 1;
error_log off;

events {
worker_connections 1024;
}

http {
server_tokens off;
client_max_body_size 0;
keepalive_timeout 65;
tcp_nodelay on;
access_log off;
server {
listen 80 default_server;
listen [::]:80 default_server;
location / {
proxy_pass http://127.0.0.1:8732/;
}
location ~ ^/(validate_block|network/connection/.*/kick|network/connect/|(forge|inject)_(block|operation|protocol)/) {
return 404;
}
location = /404.html {
internal;
}
}
}
@ -267,7 +267,7 @@ module Helpers = struct
let block cctxt block shell proto =
call_error_service1 cctxt
Services.Helpers.Parse.block block
({ shell ; proto } : Updater.raw_block)
({ shell ; proto } : Updater.raw_block_header)
end

end

@ -342,7 +342,7 @@ module Helpers : sig
proto_operation list tzresult Lwt.t
val block:
Client_rpcs.config ->
block -> Updater.shell_block -> MBytes.t ->
block -> Updater.shell_block_header -> MBytes.t ->
Block.proto_header tzresult Lwt.t
end
@ -150,7 +150,9 @@ module RPC = struct
test_network: Context.test_network;
}

let convert (block: State.Valid_block.t) = {
let convert (block: State.Valid_block.t) =
Lazy.force block.operation_hashes >>= fun operations ->
Lwt.return {
hash = block.hash ;
net_id = block.net_id ;
level = block.level ;
@ -160,7 +162,7 @@ module RPC = struct
operations_hash = block.operations_hash ;
fitness = block.fitness ;
data = block.proto_header ;
operations = Some block.operations ;
operations = Some operations ;
protocol = block.protocol_hash ;
test_network = block.test_network ;
}
@ -174,7 +176,7 @@ module RPC = struct
| Some (net_db, _block) ->
let net = Distributed_db.state net_db in
State.Valid_block.read_exn net hash >>= fun block ->
Lwt.return (convert block)
convert block
| None ->
Lwt.fail Not_found

@ -249,15 +251,15 @@ module RPC = struct
let block_info node (block: block) =
match block with
| `Genesis ->
State.Valid_block.Current.genesis node.mainnet_net >|= convert
State.Valid_block.Current.genesis node.mainnet_net >>= convert
| ( `Head n | `Test_head n ) as block ->
let validator = get_validator node block in
let net_db = Validator.net_db validator in
let net_state = Validator.net_state validator in
State.Valid_block.Current.head net_state >>= fun head ->
get_pred net_db n head >|= convert
get_pred net_db n head >>= convert
| `Hash h ->
read_valid_block_exn node h >|= convert
read_valid_block_exn node h >>= convert
| ( `Prevalidation | `Test_prevalidation ) as block ->
let validator = get_validator node block in
let pv = Validator.prevalidator validator in
@ -293,11 +295,24 @@ module RPC = struct
test_network ;
}

let rpc_context block : Updater.rpc_context =
{ context = block.State.Valid_block.context ;
level = Int32.succ block.level ;
let rpc_context (block : State.Valid_block.t) : Updater.rpc_context =
{ block_hash = block.hash ;
block_header = {
shell = {
net_id = block.net_id ;
level = block.level ;
proto_level = block.proto_level ;
predecessor = block.predecessor ;
timestamp = block.timestamp ;
operations_hash = block.operations_hash ;
fitness = block.fitness ;
timestamp = block. timestamp }
} ;
proto = block.proto_header ;
} ;
operation_hashes = (fun () -> Lazy.force block.operation_hashes) ;
operations = (fun () -> Lazy.force block.operations) ;
context = block.context ;
}

let get_rpc_context node block =
match block with
@ -317,38 +332,71 @@ module RPC = struct
| Some block -> Some (rpc_context block)
end
| ( `Prevalidation | `Test_prevalidation ) as block ->
let validator, net = get_net node block in
let validator, net_db = get_net node block in
let pv = Validator.prevalidator validator in
let net_state = Validator.net_state validator in
State.Valid_block.Current.head net_state >>= fun head ->
Prevalidator.context pv >>= function
| Error _ -> Lwt.fail Not_found
| Ok { context ; fitness } ->
let timestamp = Prevalidator.timestamp pv in
State.Valid_block.Current.head
(Distributed_db.state net) >>= fun { level } ->
let level = Int32.succ level in
Lwt.return (Some { Updater.context ; fitness ; timestamp ; level })
Context.get_protocol context >>= fun protocol ->
let proto_level =
if Protocol_hash.equal protocol head.protocol_hash then
head.proto_level
else
((head.proto_level + 1) mod 256) in
let operation_hashes =
let pv_result, _ = Prevalidator.operations pv in
[ pv_result.applied ] in
let operations_hash =
Operation_list_list_hash.compute
(List.map Operation_list_hash.compute operation_hashes) in
Lwt.return (Some {
Updater.block_hash = prevalidation_hash ;
block_header = {
shell = {
net_id = head.net_id ;
level = Int32.succ head.level ;
proto_level ;
predecessor = head.hash ;
timestamp = Prevalidator.timestamp pv ;
operations_hash ;
fitness ;
} ;
proto = MBytes.create 0 ;
} ;
operation_hashes = (fun () -> Lwt.return operation_hashes) ;
operations = begin fun () ->
Lwt_list.map_p
(Lwt_list.map_p
(Distributed_db.Operation.read_exn net_db))
operation_hashes
end ;
context ;
})

let operations node block =
match block with
| `Genesis ->
State.Valid_block.Current.genesis node.mainnet_net >>= fun { operations } ->
Lwt.return operations
State.Valid_block.Current.genesis node.mainnet_net >>= fun { operation_hashes } ->
Lazy.force operation_hashes
| ( `Head n | `Test_head n ) as block ->
let validator = get_validator node block in
let net_state = Validator.net_state validator in
let net_db = Validator.net_db validator in
State.Valid_block.Current.head net_state >>= fun head ->
get_pred net_db n head >>= fun { operations } ->
Lwt.return operations
get_pred net_db n head >>= fun { operation_hashes } ->
Lazy.force operation_hashes
| (`Prevalidation | `Test_prevalidation) as block ->
let validator, _net = get_net node block in
let pv = Validator.prevalidator validator in
let { Prevalidation.applied }, _ = Prevalidator.operations pv in
Lwt.return [applied]
| `Hash hash ->
read_valid_block node hash >|= function
| None -> []
| Some { operations } -> operations
read_valid_block node hash >>= function
| None -> Lwt.return_nil
| Some { operation_hashes } ->
Lazy.force operation_hashes

let operation_content node hash =
Distributed_db.read_operation node.distributed_db hash >>= fun op ->
@ -464,13 +512,12 @@ module RPC = struct
| Some (_, net_db) ->
State.Valid_block.known_heads (Distributed_db.state net_db)
end >>= fun test_heads ->
let map =
List.fold_left
Lwt_list.fold_left_s
(fun map block ->
convert block >|= fun bi ->
Block_hash.Map.add
block.State.Valid_block.hash (convert block) map)
Block_hash.Map.empty (test_heads @ heads) in
Lwt.return map
block.State.Valid_block.hash bi map)
Block_hash.Map.empty (test_heads @ heads)

let predecessors node len head =
let rec loop net_db acc len hash (block: State.Block_header.t) =
@ -494,7 +541,7 @@ module RPC = struct
try
let rec loop acc len hash =
State.Valid_block.read_exn state hash >>= fun block ->
let bi = convert block in
convert block >>= fun bi ->
if Block_hash.equal bi.predecessor hash then
Lwt.return (List.rev (bi :: acc))
else begin
@ -513,12 +560,12 @@ module RPC = struct
Distributed_db.read_block_exn
node.distributed_db head >>= fun (net_db, _block) ->
let net_state = Distributed_db.state net_db in
predecessors_bi net_state ignored len head >|= fun predecessors ->
predecessors_bi net_state ignored len head >>= fun predecessors ->
let ignored =
List.fold_right
(fun x s -> Block_hash.Set.add x.hash s)
predecessors ignored in
ignored, predecessors :: acc
Lwt.return (ignored, predecessors :: acc)
)
(Block_hash.Set.empty, [])
heads >>= fun (_, blocks) ->
@ -528,7 +575,7 @@ module RPC = struct

let valid_block_watcher node =
let stream, shutdown = Validator.global_watcher node.validator in
Lwt_stream.map (fun block -> convert block) stream,
Lwt_stream.map_s (fun block -> convert block) stream,
shutdown

let operation_watcher node =
@ -116,7 +116,8 @@ and valid_block = {
timestamp: Time.t ;
fitness: Protocol.fitness ;
operations_hash: Operation_list_list_hash.t ;
operations: Operation_hash.t list list ;
operation_hashes: Operation_hash.t list list Lwt.t Lazy.t ;
operations: Store.Operation.t list list Lwt.t Lazy.t ;
discovery_time: Time.t ;
protocol_hash: Protocol_hash.t ;
protocol: (module Updater.REGISTRED_PROTOCOL) option ;
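The two fields added above use the "lazy Lwt" idiom that this change applies throughout: the store read is wrapped in Lazy.t, so it runs at most once and only if something forces it. A minimal self-contained sketch of the idiom (illustrative only; fake_store_read is a made-up stand-in for the real store access, and the snippet assumes the lwt package):
(* Illustrative sketch, not code from this commit. *)
open Lwt.Infix
let fake_store_read () : string list list Lwt.t =
  Lwt.return [ [ "op1" ; "op2" ] ; [ "op3" ] ]
(* The read is deferred and memoized: it happens at most once. *)
let operation_hashes : string list list Lwt.t Lazy.t =
  lazy (fake_store_read ())
let count_operations () =
  Lazy.force operation_hashes >>= fun ops ->
  Lwt.return (List.fold_left (fun acc l -> acc + List.length l) 0 ops)
let () = assert (Lwt_main.run (count_operations ()) = 3)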
@ -128,7 +129,7 @@ and valid_block = {
}

let build_valid_block
hash header operations
hash header operation_hashes operations
context discovery_time successors invalid_successors =
Context.get_protocol context >>= fun protocol_hash ->
Context.get_test_network context >>= fun test_network ->
@ -142,6 +143,7 @@ let build_valid_block
timestamp = header.shell.timestamp ;
discovery_time ;
operations_hash = header.shell.operations_hash ;
operation_hashes ;
operations ;
fitness = header.shell.fitness ;
protocol_hash ;
@ -724,6 +726,9 @@ module Block_header = struct
let read_operations s k =
Raw_operation_list.read_all s.block_header_store k

let read_operations_exn s k =
Raw_operation_list.read_all_exn s.block_header_store k

let mark_invalid net hash errors =
mark_invalid net hash errors >>= fun marked ->
if not marked then
@ -909,7 +914,8 @@ module Raw_net = struct
Lwt.return context
end >>= fun context ->
build_valid_block
genesis.block header [] context genesis.time
genesis.block header (lazy Lwt.return_nil) (lazy Lwt.return_nil)
context genesis.time
Block_hash.Set.empty Block_hash.Set.empty >>= fun genesis_block ->
Lwt.return @@
build
@ -936,7 +942,8 @@ module Valid_block = struct
timestamp: Time.t ;
fitness: Fitness.fitness ;
operations_hash: Operation_list_list_hash.t ;
operations: Operation_hash.t list list ;
operation_hashes: Operation_hash.t list list Lwt.t Lazy.t ;
operations: Store.Operation.t list list Lwt.t Lazy.t ;
discovery_time: Time.t ;
protocol_hash: Protocol_hash.t ;
protocol: (module Updater.REGISTRED_PROTOCOL) option ;
@ -953,7 +960,9 @@ module Valid_block = struct
let known { context_index } hash =
Context.exists context_index hash

let raw_read block operations time chain_store context_index hash =
let raw_read
block operations operation_hashes
time chain_store context_index hash =
Context.checkout context_index hash >>= function
| None ->
fail (Unknown_context hash)
@ -962,12 +971,15 @@ module Valid_block = struct
>>= fun successors ->
Store.Chain.Invalid_successors.read_all (chain_store, hash)
>>= fun invalid_successors ->
build_valid_block hash block operations
build_valid_block hash block operation_hashes operations
context time successors invalid_successors >>= fun block ->
return block

let raw_read_exn block operations time chain_store context_index hash =
raw_read block operations time chain_store context_index hash >>= function
let raw_read_exn
block operations operation_hashes
time chain_store context_index hash =
raw_read block operations operation_hashes
time chain_store context_index hash >>= function
| Error _ -> Lwt.fail Not_found
| Ok data -> Lwt.return data
@ -976,8 +988,17 @@ module Valid_block = struct
| None | Some { Time.data = Error _ } ->
fail (Unknown_block hash)
| Some { Time.data = Ok block ; time } ->
Block_header.read_operations net hash >>=? fun operations ->
raw_read block operations
let operation_hashes =
lazy (Block_header.read_operations_exn net hash) in
let operations =
lazy (
Lazy.force operation_hashes >>= fun operations ->
Lwt_list.map_p
(Lwt_list.map_p
(Raw_operation.read_exn net.operation_store ))
operations)
in
raw_read block operations operation_hashes
time net_state.chain_store net_state.context_index hash

let read_opt net net_state hash =
@ -991,6 +1012,7 @@ module Valid_block = struct
| Ok data -> Lwt.return data

let store
operation_store
block_header_store
(net_state: net_state)
valid_block_watcher
@ -1011,8 +1033,6 @@ module Valid_block = struct
Raw_block_header.Locked.mark_valid
block_header_store hash >>= fun _marked ->
(* TODO fail if the block was previsouly stored ... ??? *)
Operation_list.Locked.read_all
block_header_store hash >>=? fun operations ->
(* Let's commit the context. *)
let message =
match message with
@ -1031,8 +1051,17 @@ module Valid_block = struct
Store.Chain.Valid_successors.store
(store, predecessor) hash >>= fun () ->
(* Build the `valid_block` value. *)
let operation_hashes =
lazy (Operation_list.Locked.read_all_exn block_header_store hash) in
let operations =
lazy (
Lazy.force operation_hashes >>= fun operations ->
Lwt_list.map_p
(Lwt_list.map_p
(Raw_operation.read_exn operation_store ))
operations) in
raw_read_exn
block operations discovery_time
block operations operation_hashes discovery_time
net_state.chain_store net_state.context_index hash >>= fun valid_block ->
Watcher.notify valid_block_watcher valid_block ;
Lwt.return (Ok valid_block)
@ -1067,7 +1096,7 @@ module Valid_block = struct
block_header_store hash >>= function
| Some _ -> return None (* Previously invalidated block. *)
| None ->
Locked.store
Locked.store net.operation_store
block_header_store net_state net.valid_block_watcher
hash vcontext >>=? fun valid_block ->
return (Some valid_block)
@ -1328,7 +1357,8 @@ module Net = struct
Block_header.Locked.read_discovery_time block_header_store
genesis_hash >>=? fun genesis_discovery_time ->
Valid_block.Locked.raw_read
genesis_shell_header [] genesis_discovery_time
genesis_shell_header (lazy Lwt.return_nil) (lazy Lwt.return_nil)
genesis_discovery_time
chain_store context_index genesis_hash >>=? fun genesis_block ->
return @@
Raw_net.build
@ -258,8 +258,9 @@ module Valid_block : sig
fitness: Protocol.fitness ;
(** The (validated) score of the block. *)
operations_hash: Operation_list_list_hash.t ;
operations: Operation_hash.t list list ;
(** The sequence of operations ans its (Merkle-)hash. *)
operation_hashes: Operation_hash.t list list Lwt.t Lazy.t ;
operations: Store.Operation.t list list Lwt.t Lazy.t ;
(** The sequence of operations and its (Merkle-)hash. *)
discovery_time: Time.t ;
(** The data at which the block was discorevered on the P2P network. *)
protocol_hash: Protocol_hash.t ;
@ -350,16 +350,18 @@ module Context_db = struct
State.Valid_block.store net_state hash data >>=? function
| None ->
State.Valid_block.read net_state hash >>=? fun block ->
Lazy.force block.operation_hashes >>= fun ophs ->
Lwt_list.iter_p
(Lwt_list.iter_p (fun hash ->
Distributed_db.Operation.commit net_db hash))
block.operations >>= fun () ->
ophs >>= fun () ->
return (Ok block, false)
| Some block ->
Lazy.force block.operation_hashes >>= fun ophs ->
Lwt_list.iter_p
(Lwt_list.iter_p (fun hash ->
Distributed_db.Operation.commit net_db hash))
block.operations >>= fun () ->
ophs >>= fun () ->
return (Ok block, true)
end
| Error err ->
@ -22,7 +22,7 @@ type raw_operation = Store.Operation.t = {
proto: MBytes.t ;
}

type shell_block = Store.Block_header.shell_header =
type shell_block_header = Store.Block_header.shell_header =
{ net_id: Net_id.t ;
level: Int32.t ;
proto_level: int ; (* uint8 *)
@ -32,8 +32,8 @@ type shell_block = Store.Block_header.shell_header =
fitness: MBytes.t list ;
}

type raw_block = Store.Block_header.t = {
shell: shell_block ;
type raw_block_header = Store.Block_header.t = {
shell: shell_block_header ;
proto: MBytes.t ;
}

@ -44,10 +44,11 @@ type validation_result = {
}

type rpc_context = {
block_hash: Block_hash.t ;
block_header: raw_block_header ;
operation_hashes: unit -> Operation_hash.t list list Lwt.t ;
operations: unit -> raw_operation list list Lwt.t ;
context: Context.t ;
level: Int32.t ;
timestamp: Time.t ;
fitness: Fitness.fitness ;
}
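The reworked rpc_context above drops the pre-computed level, timestamp and fitness fields and instead carries the block hash, the full block header and two thunks; callers that still need those values derive them from block_header.shell, as the protocol's rpc_init does later in this diff. A small self-contained sketch of the thunked-record style (illustrative only, simplified stand-in types, assumes the lwt package):
(* Illustrative sketch, not code from this commit: building the context stays
   cheap because operation data sits behind a [unit -> _ Lwt.t] thunk that is
   invoked only when a service actually needs the operations. *)
open Lwt.Infix
type demo_rpc_context = {
  block_hash : string ;
  operation_hashes : unit -> string list list Lwt.t ;
}
let make_context hash = {
  block_hash = hash ;
  operation_hashes = (fun () -> Lwt.return [ [ hash ^ ":op0" ] ]) ;
}
let first_operation ctxt =
  ctxt.operation_hashes () >>= function
  | (op :: _) :: _ -> Lwt.return (Some (ctxt.block_hash, op))
  | _ -> Lwt.return None
let () =
  match Lwt_main.run (first_operation (make_context "B1")) with
  | Some (bh, op) -> print_endline (bh ^ ": " ^ op)
  | None -> print_endline "no operation"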
module type PROTOCOL = sig
@ -70,13 +71,13 @@ module type PROTOCOL = sig
val precheck_block :
ancestor_context: Context.t ->
ancestor_timestamp: Time.t ->
raw_block ->
raw_block_header ->
unit tzresult Lwt.t
val begin_application :
predecessor_context: Context.t ->
predecessor_timestamp: Time.t ->
predecessor_fitness: Fitness.fitness ->
raw_block ->
raw_block_header ->
validation_state tzresult Lwt.t
val begin_construction :
predecessor_context: Context.t ->
@ -11,19 +11,6 @@ open Logging.Updater

let (//) = Filename.concat

type validation_result = Protocol.validation_result = {
context: Context.t ;
fitness: Fitness.fitness ;
message: string option ;
}

type rpc_context = Protocol.rpc_context = {
context: Context.t ;
level: Int32.t ;
timestamp: Time.t ;
fitness: Fitness.fitness ;
}

module type PROTOCOL = Protocol.PROTOCOL
module type REGISTRED_PROTOCOL = sig
val hash: Protocol_hash.t
@ -43,7 +30,7 @@ type raw_operation = Store.Operation.t = {
}
let raw_operation_encoding = Store.Operation.encoding

type shell_block = Store.Block_header.shell_header = {
type shell_block_header = Store.Block_header.shell_header = {
net_id: Net_id.t ;
level: Int32.t ;
proto_level: int ; (* uint8 *)
@ -52,13 +39,27 @@ type shell_block = Store.Block_header.shell_header = {
operations_hash: Operation_list_list_hash.t ;
fitness: MBytes.t list ;
}
let shell_block_encoding = Store.Block_header.shell_header_encoding
let shell_block_header_encoding = Store.Block_header.shell_header_encoding

type raw_block = Store.Block_header.t = {
shell: shell_block ;
type raw_block_header = Store.Block_header.t = {
shell: shell_block_header ;
proto: MBytes.t ;
}
let raw_block_encoding = Store.Block_header.encoding
let raw_block_header_encoding = Store.Block_header.encoding

type validation_result = Protocol.validation_result = {
context: Context.t ;
fitness: Fitness.fitness ;
message: string option ;
}

type rpc_context = Protocol.rpc_context = {
block_hash: Block_hash.t ;
block_header: Protocol.raw_block_header ;
operation_hashes: unit -> Operation_hash.t list list Lwt.t ;
operations: unit -> raw_operation list list Lwt.t ;
context: Context.t ;
}

(** Version table *)
@ -18,7 +18,7 @@ type raw_operation = Store.Operation.t = {
}
val raw_operation_encoding: raw_operation Data_encoding.t

type shell_block = Store.Block_header.shell_header = {
type shell_block_header = Store.Block_header.shell_header = {
net_id: Net_id.t ;
level: Int32.t ;
proto_level: int ; (* uint8 *)
@ -27,13 +27,13 @@ type shell_block = Store.Block_header.shell_header = {
operations_hash: Operation_list_list_hash.t ;
fitness: MBytes.t list ;
}
val shell_block_encoding: shell_block Data_encoding.t
val shell_block_header_encoding: shell_block_header Data_encoding.t

type raw_block = Store.Block_header.t = {
shell: shell_block ;
type raw_block_header = Store.Block_header.t = {
shell: shell_block_header ;
proto: MBytes.t ;
}
val raw_block_encoding: raw_block Data_encoding.t
val raw_block_header_encoding: raw_block_header Data_encoding.t

type validation_result = Protocol.validation_result = {
context: Context.t ;
@ -42,10 +42,11 @@ type validation_result = Protocol.validation_result = {
}

type rpc_context = Protocol.rpc_context = {
block_hash: Block_hash.t ;
block_header: raw_block_header ;
operation_hashes: unit -> Operation_hash.t list list Lwt.t ;
operations: unit -> raw_operation list list Lwt.t ;
context: Context.t ;
level: Int32.t ;
timestamp: Time.t ;
fitness: Fitness.fitness ;
}

module type PROTOCOL = Protocol.PROTOCOL
@ -13,7 +13,7 @@ open Tezos_hash

(** Exported type *)
type header = {
shell: Updater.shell_block ;
shell: Updater.shell_block_header ;
proto: proto_header ;
signature: Ed25519.Signature.t ;
}
@ -46,7 +46,7 @@ let signed_proto_header_encoding =
let unsigned_header_encoding =
let open Data_encoding in
merge_objs
Updater.shell_block_encoding
Updater.shell_block_header_encoding
proto_header_encoding

(** Constants *)
@ -64,7 +64,7 @@ type error +=
let parse_header
({ shell = { net_id ; level ; proto_level ; predecessor ;
timestamp ; fitness ; operations_hash } ;
proto } : Updater.raw_block) : header tzresult =
proto } : Updater.raw_block_header) : header tzresult =
match Data_encoding.Binary.of_bytes signed_proto_header_encoding proto with
| None -> Error [Cant_parse_proto_header]
| Some (proto, signature) ->
@ -11,7 +11,7 @@ open Tezos_hash

(** Exported type *)
type header = {
shell: Updater.shell_block ;
shell: Updater.shell_block_header ;
proto: proto_header ;
signature: Ed25519.Signature.t ;
}
@ -26,16 +26,16 @@ and proto_header = {
val max_header_length: int

(** Parse the protocol-specific part of a block header. *)
val parse_header: Updater.raw_block -> header tzresult
val parse_header: Updater.raw_block_header -> header tzresult

val proto_header_encoding:
proto_header Data_encoding.encoding

val unsigned_header_encoding:
(Updater.shell_block * proto_header) Data_encoding.encoding
(Updater.shell_block_header * proto_header) Data_encoding.encoding

val forge_header:
Updater.shell_block -> proto_header -> MBytes.t
Updater.shell_block_header -> proto_header -> MBytes.t
(** [forge_header shell_hdr proto_hdr] is the binary serialization
(using [unsigned_header_encoding]) of a block header,
comprising both the shell and the protocol part of the header,
@ -321,6 +321,19 @@ end

type error += Cannot_parse_operation

let encoding =
let open Data_encoding in
conv
(fun { hash ; shell ; contents ; signature } ->
(hash, (shell, (contents, signature))))
(fun (hash, (shell, (contents, signature))) ->
{ hash ; shell ; contents ; signature })
(merge_objs
(obj1 (req "hash" Operation_hash.encoding))
(merge_objs
Updater.shell_operation_encoding
Encoding.signed_proto_operation_encoding))

let () =
register_error_kind
`Branch
@ -83,6 +83,8 @@ and counter = Int32.t

type error += Cannot_parse_operation (* `Branch *)

val encoding: operation Data_encoding.t

val parse:
Operation_hash.t -> Updater.raw_operation -> operation tzresult
@ -34,6 +34,15 @@ let wrap_tzerror encoding =
(fun x -> Error x) ;
]

let operations custom_root =
RPC.service
~description: "All the operations of the block (parsed)."
~input: empty
~output: (wrap_tzerror @@
(list (list (dynamic_size Operation.encoding))))
RPC.Path.(custom_root / "operations")

module Constants = struct

let cycle_length custom_root =
@ -592,7 +601,7 @@ module Helpers = struct
let block custom_root =
RPC.service
~description:"Parse a block"
~input: Updater.raw_block_encoding
~input: Updater.raw_block_header_encoding
~output: (wrap_tzerror Block.proto_header_encoding)
RPC.Path.(custom_root / "helpers" / "parse" / "block" )
@ -9,32 +9,65 @@

open Tezos_context

let rpc_init { Updater.context ; level ; timestamp ; fitness } =
Tezos_context.init ~level ~timestamp ~fitness context
type rpc_context = {
block_hash: Block_hash.t ;
block_header: Updater.raw_block_header ;
operation_hashes: unit -> Operation_hash.t list list Lwt.t ;
operations: unit -> Updater.raw_operation list list Lwt.t ;
context: Tezos_context.t ;
}

let rpc_init
({ block_hash ; block_header ;
operation_hashes ; operations ; context } : Updater.rpc_context) =
let level = Int32.succ block_header.shell.level in
let timestamp = block_header.shell.timestamp in
let fitness = block_header.shell.fitness in
Tezos_context.init ~level ~timestamp ~fitness context >>=? fun context ->
return { block_hash ; block_header ; operation_hashes ; operations ; context }

let rpc_services = ref (RPC.empty : Updater.rpc_context RPC.directory)
let register0 s f =

let register0_fullctxt s f =
rpc_services :=
RPC.register !rpc_services (s RPC.Path.root)
(fun ctxt () ->
( rpc_init ctxt >>=? fun ctxt ->
f ctxt ) >>= RPC.Answer.return)
let register1 s f =
let register0 s f = register0_fullctxt s (fun { context } -> f context)

let register1_fullctxt s f =
rpc_services :=
RPC.register !rpc_services (s RPC.Path.root)
(fun ctxt arg ->
( rpc_init ctxt >>=? fun ctxt ->
f ctxt arg ) >>= RPC.Answer.return)
let register2 s f =
let register1 s f = register1_fullctxt s (fun { context } x -> f context x)
let register1_noctxt s f =
rpc_services :=
RPC.register !rpc_services (s RPC.Path.root)
(fun _ arg -> f arg >>= RPC.Answer.return)

let register2_fullctxt s f =
rpc_services :=
RPC.register !rpc_services (s RPC.Path.root)
(fun (ctxt, arg1) arg2 ->
( rpc_init ctxt >>=? fun ctxt ->
f ctxt arg1 arg2 ) >>= RPC.Answer.return)
let register1_noctxt s f =
rpc_services :=
RPC.register !rpc_services (s RPC.Path.root)
(fun _ arg -> f arg >>= RPC.Answer.return)
let register2 s f = register2_fullctxt s (fun { context } x y -> f context x y)
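Each *_fullctxt / plain pair above follows the same wrapper pattern: the full-context variant hands the handler the whole initialized record, while the plain variant projects out just the context field that most handlers need. A minimal self-contained sketch of that projection (illustrative only, plain functions instead of the RPC directory, hypothetical demo names):
(* Illustrative sketch, not code from this commit. *)
type demo_ctxt = { context : int ; block_hash : string }
(* "fullctxt" handlers see the whole record... *)
let handle_fullctxt (f : demo_ctxt -> string) (ctxt : demo_ctxt) = f ctxt
(* ...while plain handlers are given only the projected context field. *)
let handle (f : int -> string) = handle_fullctxt (fun { context ; _ } -> f context)
let () =
  print_endline (handle_fullctxt (fun c -> c.block_hash) { context = 0 ; block_hash = "full" }) ;
  print_endline (handle string_of_int { context = 42 ; block_hash = "B1" })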

(*-- Operations --------------------------------------------------------------*)

let () =
register0_fullctxt
Services.operations
(fun { operation_hashes ; operations } ->
operation_hashes () >>= fun operation_hashes ->
operations () >>= fun operations ->
map2_s
(map2_s (fun x y -> Lwt.return (Operation.parse x y)))
operation_hashes operations)

(*-- Constants ---------------------------------------------------------------*)

@ -146,7 +179,7 @@ let () =
rpc_services :=
RPC.register !rpc_services (s RPC.Path.root)
(fun (ctxt, contract) arg ->
( rpc_init ctxt >>=? fun ctxt ->
( rpc_init ctxt >>=? fun { context = ctxt } ->
Contract.exists ctxt contract >>=? function
| true -> f ctxt contract arg
| false -> raise Not_found ) >>= RPC.Answer.return) in
@ -491,6 +491,8 @@ and counter = Int32.t

module Operation : sig

val encoding: operation Data_encoding.t

type error += Cannot_parse_operation (* `Branch *)
val parse:
Operation_hash.t -> Updater.raw_operation -> operation tzresult
@ -517,7 +519,7 @@ end
module Block : sig

type header = {
shell: Updater.shell_block ;
shell: Updater.shell_block_header ;
proto: proto_header ;
signature: Ed25519.Signature.t ;
}
@ -530,16 +532,16 @@ module Block : sig

val max_header_length: int

val parse_header: Updater.raw_block -> header tzresult
val parse_header: Updater.raw_block_header -> header tzresult

val proto_header_encoding:
proto_header Data_encoding.encoding

val unsigned_header_encoding:
(Updater.shell_block * proto_header) Data_encoding.encoding
(Updater.shell_block_header * proto_header) Data_encoding.encoding

val forge_header:
Updater.shell_block -> proto_header -> MBytes.t
Updater.shell_block_header -> proto_header -> MBytes.t

end
@ -88,6 +88,16 @@ val iter_s : ('a -> unit tzresult Lwt.t) -> 'a list -> unit tzresult Lwt.t

(** A {!List.map} in the monad *)
val map_s : ('a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t
val map_p : ('a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t

(** A {!List.map2} in the monad *)
val map2 :
('a -> 'b -> 'c tzresult) -> 'a list -> 'b list -> 'c list tzresult

(** A {!List.map2} in the monad *)
val map2_s :
('a -> 'b -> 'c tzresult Lwt.t) -> 'a list -> 'b list ->
'c list tzresult Lwt.t

(** A {!List.map_filter} in the monad *)
val map_filter_s : ('a -> 'b option tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t
@ -16,7 +16,7 @@ val raw_operation_encoding: raw_operation Data_encoding.t

(** The version agnostic toplevel structure of blocks. *)
type shell_block = {
type shell_block_header = {
net_id: Net_id.t ;
(** The genesis of the chain this block belongs to. *)
level: Int32.t ;
@ -34,13 +34,13 @@ type shell_block = {
of unsigned bytes. Ordered by length and then by contents
lexicographically. *)
}
val shell_block_encoding: shell_block Data_encoding.t
val shell_block_header_encoding: shell_block_header Data_encoding.t

type raw_block = {
shell: shell_block ;
type raw_block_header = {
shell: shell_block_header ;
proto: MBytes.t ;
}
val raw_block_encoding: raw_block Data_encoding.t
val raw_block_header_encoding: raw_block_header Data_encoding.t

type validation_result = {
context: Context.t ;
@ -49,10 +49,11 @@ type validation_result = {
}

type rpc_context = {
block_hash: Block_hash.t ;
block_header: raw_block_header ;
operation_hashes: unit -> Operation_hash.t list list Lwt.t ;
operations: unit -> raw_operation list list Lwt.t ;
context: Context.t ;
level: Int32.t ;
timestamp: Time.t ;
fitness: Fitness.fitness ;
}

(** This is the signature of a Tezos protocol implementation. It has
@ -104,12 +105,12 @@ module type PROTOCOL = sig
val precheck_block :
ancestor_context: Context.t ->
ancestor_timestamp: Time.t ->
raw_block ->
raw_block_header ->
unit tzresult Lwt.t

(** The first step in a block validation sequence. Initializes a
validation context for validating a block. Takes as argument the
{!raw_block} to initialize the context for this block, patching
{!raw_block_header} to initialize the context for this block, patching
the context resulting of the application of the predecessor
block passed as parameter. The function {!precheck_block} may
not have been called before [begin_application], so all the
@ -118,12 +119,12 @@ module type PROTOCOL = sig
predecessor_context: Context.t ->
predecessor_timestamp: Time.t ->
predecessor_fitness: Fitness.fitness ->
raw_block ->
raw_block_header ->
validation_state tzresult Lwt.t

(** Initializes a validation context for constructing a new block
(as opposed to validating an existing block). Since there is no
{!raw_block} header available, the parts that it provides are
{!raw_block_header} header available, the parts that it provides are
passed as arguments (predecessor block hash, context resulting
of the application of the predecessor block, and timestamp). *)
val begin_construction :
@ -52,7 +52,7 @@ module Command = struct

let forge shell command =
Data_encoding.Binary.to_bytes
(Data_encoding.tup2 Updater.shell_block_encoding encoding)
(Data_encoding.tup2 Updater.shell_block_header_encoding encoding)
(shell, command)

end
@ -39,7 +39,7 @@ let compare_operations _ _ = 0
let max_number_of_operations = 0

type block = {
shell: Updater.shell_block ;
shell: Updater.shell_block_header ;
command: Data.Command.t ;
signature: Ed25519.Signature.t ;
}
@ -106,7 +106,7 @@ module Mining : sig

val mine_stamp :
Client_proto_rpcs.block ->
secret_key ->
Updater.shell_block ->
Updater.shell_block_header ->
int ->
Nonce_hash.t ->
MBytes.t tzresult Lwt.t