Merge branch 'improved_rpc_context' into 'master'

Improved rpc context

See merge request !179
Grégoire Henry 2017-04-17 20:52:01 +02:00
commit c1b4a74bf7
29 changed files with 370 additions and 191 deletions

======================================================================
@@ -173,6 +173,7 @@ build:docker:alphanet:
   only:
     - alphanet@tezos/tezos
   script:
+    - sed -i s/TEZOS/TEZOS_ALPHANET/ src/node/shell/distributed_db_message.ml
     - patch -p1 < scripts/alphanet_constants.patch
     - ./scripts/create_docker_image.sh
         "${CI_REGISTRY_IMAGE}" "${CI_BUILD_REF}.patched" .
@@ -185,6 +186,7 @@ build:docker:alphanet_next:
   only:
     - master@tezos/tezos
   script:
+    - sed -i s/TEZOS/TEZOS_ALPHANET_NEXT/ src/node/shell/distributed_db_message.ml
     - patch -p1 < scripts/alphanet_constants.patch
     - patch -p1 < scripts/alphanet_next.patch
     - ./scripts/create_docker_image.sh
@@ -296,9 +298,7 @@ deploy:alphanet_next:
     - echo "${CI_KH}" > ~/.ssh/known_hosts
     - echo "${CI_PK_ALPHANET_NEXT}" > ~/.ssh/id_ed25519
     - chmod 400 ~/.ssh/id_ed25519
-    - ssh greg@zo.gbzm.fr
-    - ssh tezos@35.167.138.212
-    - ssh tezos@35.165.227.4
+    - echo | ssh -v greg@zo.gbzm.fr
   allow_failure: true
   cleanup:

======================================================================
@@ -2,11 +2,11 @@ FROM alpine:$alpine_version
 LABEL distro_style="apk" distro="alpine" distro_long="alpine-$alpine_version" arch="x86_64" operatingsystem="linux"
-RUN apk update && \
+RUN adduser -S tezos && \
+    apk update && \
     apk upgrade && \
-    apk add sudo bash libssl1.0 libsodium libev gmp git && \
+    apk add sudo bash libssl1.0 libsodium libev gmp git nginx && \
     rm -f /var/cache/apk/* && \
-    adduser -S tezos && \
     echo 'tezos ALL=(ALL:ALL) NOPASSWD:ALL' > /etc/sudoers.d/tezos && \
     chmod 440 /etc/sudoers.d/tezos && \
     chown root:root /etc/sudoers.d/tezos && \
@@ -30,6 +30,9 @@ RUN sudo cp scripts/docker_entrypoint.sh /usr/local/bin/tezos && \
 RUN sudo mkdir -p /var/run/tezos && \
     sudo chown tezos /var/run/tezos
 
+RUN sudo mkdir -p /run/nginx && \
+    sudo cp scripts/nginx.conf /etc/nginx
+
 ENV EDITOR=vi
 
 VOLUME /var/run/tezos

======================================================================
@@ -128,7 +128,7 @@ check_volume() {
 clear_volume() {
     if check_volume ; then
-        docker volume rm "$docker_volume"
+        docker volume rm "$docker_volume" > /dev/null
         echo "\033[32mThe blockchain data has been removed from the disk.\033[0m"
     else
         echo "\033[32mNo remaining data to be removed from the disk.\033[0m"
@@ -170,11 +170,13 @@ start_container() {
     fi
     docker rm "$docker_container" || true > /dev/null 2>&1
     echo "Launching the docker container..."
-    docker run -dit -p "$port:$port" \
+    docker run --rm -dit -p "$port:$port" -p "8732:80" \
            -v $docker_volume:/var/run/tezos \
            --entrypoint /bin/sh \
            --name "$docker_container" \
            "$docker_image" > /dev/null
+    docker exec --user root --detach "$docker_container" \
+           nginx -c /etc/nginx/nginx.conf
     may_restore_identity
     may_restore_accounts
 fi
@@ -188,7 +190,7 @@ stop_container() {
     save_identity ## Saving again, just in case...
     save_accounts
     printf "Stopping the container... "
-    docker stop "$docker_container"
+    docker stop "$docker_container" >/dev/null
     echo " done"
 }
@@ -400,10 +402,16 @@ assert_uptodate() {
 update_script() {
     pull_image
-    tmp="$(docker run -dit --entrypoint /bin/true "$docker_image")"
-    docker cp "$tmp:home/tezos/scripts/alphanet.sh" "$0"
-    docker stop "$tmp"
-    echo "\033[32mThe script has been updated.\033[0m"
+    tmp="$(docker run --rm -dit --entrypoint /bin/true "$docker_image")"
+    docker cp "$tmp:home/tezos/scripts/alphanet.sh" ".alphanet.sh.new"
+    docker stop "$tmp" > /dev/null
+    if ! diff .alphanet.sh.new "$0" >/dev/null 2>&1 ; then
+        mv .alphanet.sh.new "$0"
+        echo "\033[32mThe script has been updated.\033[0m"
+    else
+        rm .alphanet.sh.new
+        echo "\033[32mThe script is up to date.\033[0m"
+    fi
 }
 
 usage() {
@@ -470,6 +478,10 @@ case "$command" in
         exec "$0" start "$@"
         ;;
     clear)
+        if check_container; then
+            echo "\033[31mCannot clear data while the container is running.\033[0m"
+            exit 1
+        fi
         clear_volume
         ;;
     status)

======================================================================
@@ -22,15 +22,3 @@ diff --git a/src/proto/alpha/constants_repr.ml b/src/proto/alpha/constants_repr.ml
         max_signing_slot = 15 ;
         instructions_per_transaction = 16 * 1024 ;
         proof_of_work_threshold =
-diff --git a/src/node/shell/distributed_db_message.ml b/src/node/shell/distributed_db_message.ml
---- a/src/node/shell/distributed_db_message.ml
-+++ b/src/node/shell/distributed_db_message.ml
-@@ -144,7 +144,7 @@ let encoding =
-   let versions =
-     let open P2p.Version in
--    [ { name = "TEZOS" ;
-+    [ { name = "TEZOS_ALPHANET" ;
-        major = 0 ;
-        minor = 5 ;
-      }

======================================================================
@@ -1,15 +1,3 @@
-diff --git a/src/node/shell/distributed_db_message.ml b/src/node/shell/distributed_db_message.ml
---- a/src/node/shell/distributed_db_message.ml
-+++ b/src/node/shell/distributed_db_message.ml
-@@ -144,7 +144,7 @@ let encoding =
-   let versions =
-     let open P2p.Version in
--    [ { name = "TEZOS_ALPHANET" ;
-+    [ { name = "TEZOS_ALPHANET_NEXT" ;
-        major = 0 ;
-        minor = 5 ;
-      }
 diff --git a/scripts/alphanet.sh b/scripts/alphanet.sh
 --- a/scripts/alphanet.sh
 +++ b/scripts/alphanet.sh

======================================================================
@@ -28,12 +28,12 @@ wait_for_the_node_to_be_bootstraped() {
 may_create_identity() {
     if ! $client get balance "my_identity" >/dev/null 2>&1 ; then
         echo "Generating new manager key (known as 'my_identity')..."
-        $client gen keys my_identity
+        $client gen keys "my_identity"
     fi
     if ! $client get balance "my_account" >/dev/null 2>&1 ; then
         echo "Creating new account for 'my_identity' (known as 'my_account')..."
-        $client forget contract my_account || true >/dev/null 2>&1
-        $client originate free account my_account for my_identity
+        $client forget contract "my_account" >/dev/null 2>&1 || true
+        $client originate free account "my_account" for "my_identity"
     fi
 }

======================================================================
@@ -7,20 +7,24 @@ node="${node:=tezos-node}"
 client="${client:=tezos-client -base-dir \"$client_dir\"}"
 
 init() {
-    if [ -f "$data_dir/alphanet_version" ] && \
-       [ "$(cat $data_dir/alphanet_version)" \
+    if [ ! -f "$data_dir/alphanet_version" ] || \
+       [ "$(cat "$data_dir/alphanet_version")" \
          != "$(cat ~/scripts/alphanet_version)" ]; then
-        echo "\033[33mThe alphanet chain has been reset\033[0m"
-        mv "$node_dir/identity.json" /tmp
-        mv "$client_dir/public key hashs" /tmp
-        mv "$client_dir/public keys" /tmp
-        mv "$client_dir/secret keys" /tmp
+        echo -e "\033[33mThe alphanet chain has been reset\033[0m"
+        mkdir -p "$data_dir/bak"
+        mv "$node_dir/identity.json" \
+           "$client_dir/public key hashs" \
+           "$client_dir/public keys" \
+           "$client_dir/secret keys" \
+           "$data_dir/bak"
         rm -rf "$node_dir" "$client_dir"
         mkdir -p "$node_dir" "$client_dir"
-        mv "/tmp/identity.json" "$node_dir/"
-        mv "/tmp/public key hashs" "$client_dir/"
-        mv "/tmp/public keys" "$client_dir/"
-        mv "/tmp/secret keys" "$client_dir/"
+        mv "$data_dir/bak/identity.json" "$node_dir/"
+        mv "$data_dir/bak/public key hashs" "$client_dir/"
+        mv "$data_dir/bak/public keys" "$client_dir/"
+        mv "$data_dir/bak/secret keys" "$client_dir/"
+        rmdir "$data_dir/bak"
+        cp ~/scripts/alphanet_version "$data_dir/alphanet_version"
     fi
     if [ ! -f "$node_dir/config.json" ]; then
         "$node" config init \

======================================================================
scripts/nginx.conf (new file, 31 lines)
@@ -0,0 +1,31 @@
+# /etc/nginx/nginx.conf
+
+user nginx;
+worker_processes 1;
+error_log off;
+
+events {
+    worker_connections 1024;
+}
+
+http {
+    server_tokens off;
+    client_max_body_size 0;
+    keepalive_timeout 65;
+    tcp_nodelay on;
+    access_log off;
+
+    server {
+        listen 80 default_server;
+        listen [::]:80 default_server;
+        location / {
+            proxy_pass http://127.0.0.1:8732/;
+        }
+        location ~ ^/(validate_block|network/connection/.*/kick|network/connect/|(forge|inject)_(block|operation|protocol)/) {
+            return 404;
+        }
+        location = /404.html {
+            internal;
+        }
+    }
+}

======================================================================
@@ -267,7 +267,7 @@ module Helpers = struct
       let block cctxt block shell proto =
         call_error_service1 cctxt
           Services.Helpers.Parse.block block
-          ({ shell ; proto } : Updater.raw_block)
+          ({ shell ; proto } : Updater.raw_block_header)
     end
 end

======================================================================
@@ -342,7 +342,7 @@ module Helpers : sig
         proto_operation list tzresult Lwt.t
     val block:
       Client_rpcs.config ->
-      block -> Updater.shell_block -> MBytes.t ->
+      block -> Updater.shell_block_header -> MBytes.t ->
       Block.proto_header tzresult Lwt.t
   end
 end

======================================================================
@@ -150,20 +150,22 @@ module RPC = struct
     test_network: Context.test_network;
   }
 
-  let convert (block: State.Valid_block.t) = {
-    hash = block.hash ;
-    net_id = block.net_id ;
-    level = block.level ;
-    proto_level = block.proto_level ;
-    predecessor = block.predecessor ;
-    timestamp = block.timestamp ;
-    operations_hash = block.operations_hash ;
-    fitness = block.fitness ;
-    data = block.proto_header ;
-    operations = Some block.operations ;
-    protocol = block.protocol_hash ;
-    test_network = block.test_network ;
-  }
+  let convert (block: State.Valid_block.t) =
+    Lazy.force block.operation_hashes >>= fun operations ->
+    Lwt.return {
+      hash = block.hash ;
+      net_id = block.net_id ;
+      level = block.level ;
+      proto_level = block.proto_level ;
+      predecessor = block.predecessor ;
+      timestamp = block.timestamp ;
+      operations_hash = block.operations_hash ;
+      fitness = block.fitness ;
+      data = block.proto_header ;
+      operations = Some operations ;
+      protocol = block.protocol_hash ;
+      test_network = block.test_network ;
+    }
 
   let inject_block node = node.inject_block
   let inject_operation node = node.inject_operation
@@ -174,7 +176,7 @@ module RPC = struct
     | Some (net_db, _block) ->
         let net = Distributed_db.state net_db in
         State.Valid_block.read_exn net hash >>= fun block ->
-        Lwt.return (convert block)
+        convert block
     | None ->
         Lwt.fail Not_found
@@ -249,15 +251,15 @@ module RPC = struct
   let block_info node (block: block) =
     match block with
     | `Genesis ->
-        State.Valid_block.Current.genesis node.mainnet_net >|= convert
+        State.Valid_block.Current.genesis node.mainnet_net >>= convert
     | ( `Head n | `Test_head n ) as block ->
         let validator = get_validator node block in
         let net_db = Validator.net_db validator in
         let net_state = Validator.net_state validator in
         State.Valid_block.Current.head net_state >>= fun head ->
-        get_pred net_db n head >|= convert
+        get_pred net_db n head >>= convert
     | `Hash h ->
-        read_valid_block_exn node h >|= convert
+        read_valid_block_exn node h >>= convert
     | ( `Prevalidation | `Test_prevalidation ) as block ->
         let validator = get_validator node block in
         let pv = Validator.prevalidator validator in
@@ -293,11 +295,24 @@ module RPC = struct
       test_network ;
     }
 
-  let rpc_context block : Updater.rpc_context =
-    { context = block.State.Valid_block.context ;
-      level = Int32.succ block.level ;
-      fitness = block.fitness ;
-      timestamp = block. timestamp }
+  let rpc_context (block : State.Valid_block.t) : Updater.rpc_context =
+    { block_hash = block.hash ;
+      block_header = {
+        shell = {
+          net_id = block.net_id ;
+          level = block.level ;
+          proto_level = block.proto_level ;
+          predecessor = block.predecessor ;
+          timestamp = block.timestamp ;
+          operations_hash = block.operations_hash ;
+          fitness = block.fitness ;
+        } ;
+        proto = block.proto_header ;
+      } ;
+      operation_hashes = (fun () -> Lazy.force block.operation_hashes) ;
+      operations = (fun () -> Lazy.force block.operations) ;
+      context = block.context ;
+    }
 
   let get_rpc_context node block =
     match block with
@@ -317,38 +332,71 @@ module RPC = struct
         | Some block -> Some (rpc_context block)
       end
     | ( `Prevalidation | `Test_prevalidation ) as block ->
-        let validator, net = get_net node block in
+        let validator, net_db = get_net node block in
        let pv = Validator.prevalidator validator in
+        let net_state = Validator.net_state validator in
+        State.Valid_block.Current.head net_state >>= fun head ->
        Prevalidator.context pv >>= function
        | Error _ -> Lwt.fail Not_found
        | Ok { context ; fitness } ->
-            let timestamp = Prevalidator.timestamp pv in
-            State.Valid_block.Current.head
-              (Distributed_db.state net) >>= fun { level } ->
-            let level = Int32.succ level in
-            Lwt.return (Some { Updater.context ; fitness ; timestamp ; level })
+            Context.get_protocol context >>= fun protocol ->
+            let proto_level =
+              if Protocol_hash.equal protocol head.protocol_hash then
+                head.proto_level
+              else
+                ((head.proto_level + 1) mod 256) in
+            let operation_hashes =
+              let pv_result, _ = Prevalidator.operations pv in
+              [ pv_result.applied ] in
+            let operations_hash =
+              Operation_list_list_hash.compute
+                (List.map Operation_list_hash.compute operation_hashes) in
+            Lwt.return (Some {
+                Updater.block_hash = prevalidation_hash ;
+                block_header = {
+                  shell = {
+                    net_id = head.net_id ;
+                    level = Int32.succ head.level ;
+                    proto_level ;
+                    predecessor = head.hash ;
+                    timestamp = Prevalidator.timestamp pv ;
+                    operations_hash ;
+                    fitness ;
+                  } ;
+                  proto = MBytes.create 0 ;
+                } ;
+                operation_hashes = (fun () -> Lwt.return operation_hashes) ;
+                operations = begin fun () ->
+                  Lwt_list.map_p
+                    (Lwt_list.map_p
+                       (Distributed_db.Operation.read_exn net_db))
+                    operation_hashes
+                end ;
+                context ;
+              })
 
   let operations node block =
     match block with
     | `Genesis ->
-        State.Valid_block.Current.genesis node.mainnet_net >>= fun { operations } ->
-        Lwt.return operations
+        State.Valid_block.Current.genesis node.mainnet_net >>= fun { operation_hashes } ->
+        Lazy.force operation_hashes
     | ( `Head n | `Test_head n ) as block ->
        let validator = get_validator node block in
        let net_state = Validator.net_state validator in
        let net_db = Validator.net_db validator in
        State.Valid_block.Current.head net_state >>= fun head ->
-        get_pred net_db n head >>= fun { operations } ->
-        Lwt.return operations
+        get_pred net_db n head >>= fun { operation_hashes } ->
+        Lazy.force operation_hashes
     | (`Prevalidation | `Test_prevalidation) as block ->
        let validator, _net = get_net node block in
        let pv = Validator.prevalidator validator in
        let { Prevalidation.applied }, _ = Prevalidator.operations pv in
        Lwt.return [applied]
     | `Hash hash ->
-        read_valid_block node hash >|= function
-        | None -> []
-        | Some { operations } -> operations
+        read_valid_block node hash >>= function
+        | None -> Lwt.return_nil
+        | Some { operation_hashes } ->
+            Lazy.force operation_hashes
 
   let operation_content node hash =
     Distributed_db.read_operation node.distributed_db hash >>= fun op ->
@@ -464,13 +512,12 @@ module RPC = struct
     | Some (_, net_db) ->
         State.Valid_block.known_heads (Distributed_db.state net_db)
     end >>= fun test_heads ->
-    let map =
-      List.fold_left
-        (fun map block ->
-           Block_hash.Map.add
-             block.State.Valid_block.hash (convert block) map)
-        Block_hash.Map.empty (test_heads @ heads) in
-    Lwt.return map
+    Lwt_list.fold_left_s
+      (fun map block ->
+         convert block >|= fun bi ->
+         Block_hash.Map.add
+           block.State.Valid_block.hash bi map)
+      Block_hash.Map.empty (test_heads @ heads)
 
   let predecessors node len head =
     let rec loop net_db acc len hash (block: State.Block_header.t) =
@@ -494,13 +541,13 @@ module RPC = struct
     try
       let rec loop acc len hash =
         State.Valid_block.read_exn state hash >>= fun block ->
-        let bi = convert block in
+        convert block >>= fun bi ->
        if Block_hash.equal bi.predecessor hash then
          Lwt.return (List.rev (bi :: acc))
        else begin
          if len = 0
          || Block_hash.Set.mem hash ignored then
            Lwt.return (List.rev acc)
          else
            loop (bi :: acc) (len-1) bi.predecessor
        end in
@@ -513,12 +560,12 @@ module RPC = struct
           Distributed_db.read_block_exn
             node.distributed_db head >>= fun (net_db, _block) ->
           let net_state = Distributed_db.state net_db in
-           predecessors_bi net_state ignored len head >|= fun predecessors ->
+           predecessors_bi net_state ignored len head >>= fun predecessors ->
           let ignored =
             List.fold_right
               (fun x s -> Block_hash.Set.add x.hash s)
               predecessors ignored in
-           ignored, predecessors :: acc
+           Lwt.return (ignored, predecessors :: acc)
        )
        (Block_hash.Set.empty, [])
        heads >>= fun (_, blocks) ->
@@ -528,7 +575,7 @@ module RPC = struct
   let valid_block_watcher node =
     let stream, shutdown = Validator.global_watcher node.validator in
-    Lwt_stream.map (fun block -> convert block) stream,
+    Lwt_stream.map_s (fun block -> convert block) stream,
     shutdown
 
   let operation_watcher node =
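With this change, building block info is effectful: the operation hashes live behind a lazy Lwt promise, so callers move from >|= to >>=. A minimal sketch of the resulting calling convention, assuming only the State.Valid_block.t fields shown in this diff (the helper itself is hypothetical):

open Lwt.Infix

(* Hypothetical helper: force the lazily loaded operation hashes and count
   them, binding on the promise exactly as the new convert does. *)
let operation_count (block : State.Valid_block.t) : int Lwt.t =
  Lazy.force block.operation_hashes >>= fun hashes ->
  Lwt.return (List.length (List.concat hashes))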

======================================================================
@@ -116,7 +116,8 @@ and valid_block = {
   timestamp: Time.t ;
   fitness: Protocol.fitness ;
   operations_hash: Operation_list_list_hash.t ;
-  operations: Operation_hash.t list list ;
+  operation_hashes: Operation_hash.t list list Lwt.t Lazy.t ;
+  operations: Store.Operation.t list list Lwt.t Lazy.t ;
   discovery_time: Time.t ;
   protocol_hash: Protocol_hash.t ;
   protocol: (module Updater.REGISTRED_PROTOCOL) option ;
@@ -128,7 +129,7 @@ and valid_block = {
 }
 
 let build_valid_block
-    hash header operations
+    hash header operation_hashes operations
     context discovery_time successors invalid_successors =
   Context.get_protocol context >>= fun protocol_hash ->
   Context.get_test_network context >>= fun test_network ->
@@ -142,6 +143,7 @@ let build_valid_block
     timestamp = header.shell.timestamp ;
     discovery_time ;
     operations_hash = header.shell.operations_hash ;
+    operation_hashes ;
     operations ;
     fitness = header.shell.fitness ;
     protocol_hash ;
@@ -724,6 +726,9 @@ module Block_header = struct
   let read_operations s k =
     Raw_operation_list.read_all s.block_header_store k
 
+  let read_operations_exn s k =
+    Raw_operation_list.read_all_exn s.block_header_store k
+
   let mark_invalid net hash errors =
     mark_invalid net hash errors >>= fun marked ->
     if not marked then
@@ -909,7 +914,8 @@ module Raw_net = struct
        Lwt.return context
   end >>= fun context ->
   build_valid_block
-    genesis.block header [] context genesis.time
+    genesis.block header (lazy Lwt.return_nil) (lazy Lwt.return_nil)
+    context genesis.time
     Block_hash.Set.empty Block_hash.Set.empty >>= fun genesis_block ->
   Lwt.return @@
   build
@@ -936,7 +942,8 @@ module Valid_block = struct
     timestamp: Time.t ;
     fitness: Fitness.fitness ;
     operations_hash: Operation_list_list_hash.t ;
-    operations: Operation_hash.t list list ;
+    operation_hashes: Operation_hash.t list list Lwt.t Lazy.t ;
+    operations: Store.Operation.t list list Lwt.t Lazy.t ;
     discovery_time: Time.t ;
     protocol_hash: Protocol_hash.t ;
     protocol: (module Updater.REGISTRED_PROTOCOL) option ;
@@ -953,7 +960,9 @@ module Valid_block = struct
     let known { context_index } hash =
       Context.exists context_index hash
 
-    let raw_read block operations time chain_store context_index hash =
+    let raw_read
+        block operations operation_hashes
+        time chain_store context_index hash =
       Context.checkout context_index hash >>= function
       | None ->
           fail (Unknown_context hash)
@@ -962,12 +971,15 @@ module Valid_block = struct
          >>= fun successors ->
          Store.Chain.Invalid_successors.read_all (chain_store, hash)
          >>= fun invalid_successors ->
-          build_valid_block hash block operations
+          build_valid_block hash block operation_hashes operations
            context time successors invalid_successors >>= fun block ->
          return block
 
-    let raw_read_exn block operations time chain_store context_index hash =
-      raw_read block operations time chain_store context_index hash >>= function
+    let raw_read_exn
+        block operations operation_hashes
+        time chain_store context_index hash =
+      raw_read block operations operation_hashes
+        time chain_store context_index hash >>= function
      | Error _ -> Lwt.fail Not_found
      | Ok data -> Lwt.return data
@@ -976,8 +988,17 @@ module Valid_block = struct
      | None | Some { Time.data = Error _ } ->
          fail (Unknown_block hash)
      | Some { Time.data = Ok block ; time } ->
-          Block_header.read_operations net hash >>=? fun operations ->
-          raw_read block operations
+          let operation_hashes =
+            lazy (Block_header.read_operations_exn net hash) in
+          let operations =
+            lazy (
+              Lazy.force operation_hashes >>= fun operations ->
+              Lwt_list.map_p
+                (Lwt_list.map_p
+                   (Raw_operation.read_exn net.operation_store ))
+                operations)
+          in
+          raw_read block operations operation_hashes
            time net_state.chain_store net_state.context_index hash
 
    let read_opt net net_state hash =
@@ -991,6 +1012,7 @@ module Valid_block = struct
      | Ok data -> Lwt.return data
 
    let store
+        operation_store
        block_header_store
        (net_state: net_state)
        valid_block_watcher
@@ -1011,8 +1033,6 @@ module Valid_block = struct
      Raw_block_header.Locked.mark_valid
        block_header_store hash >>= fun _marked ->
      (* TODO fail if the block was previsouly stored ... ??? *)
-      Operation_list.Locked.read_all
-        block_header_store hash >>=? fun operations ->
      (* Let's commit the context. *)
      let message =
        match message with
@@ -1031,8 +1051,17 @@ module Valid_block = struct
      Store.Chain.Valid_successors.store
        (store, predecessor) hash >>= fun () ->
      (* Build the `valid_block` value. *)
+      let operation_hashes =
+        lazy (Operation_list.Locked.read_all_exn block_header_store hash) in
+      let operations =
+        lazy (
+          Lazy.force operation_hashes >>= fun operations ->
+          Lwt_list.map_p
+            (Lwt_list.map_p
+               (Raw_operation.read_exn operation_store ))
+            operations) in
      raw_read_exn
-        block operations discovery_time
+        block operations operation_hashes discovery_time
        net_state.chain_store net_state.context_index hash >>= fun valid_block ->
      Watcher.notify valid_block_watcher valid_block ;
      Lwt.return (Ok valid_block)
@@ -1067,7 +1096,7 @@ module Valid_block = struct
        block_header_store hash >>= function
      | Some _ -> return None (* Previously invalidated block. *)
      | None ->
-          Locked.store
+          Locked.store net.operation_store
            block_header_store net_state net.valid_block_watcher
            hash vcontext >>=? fun valid_block ->
          return (Some valid_block)
@@ -1328,7 +1357,8 @@ module Net = struct
    Block_header.Locked.read_discovery_time block_header_store
      genesis_hash >>=? fun genesis_discovery_time ->
    Valid_block.Locked.raw_read
-      genesis_shell_header [] genesis_discovery_time
+      genesis_shell_header (lazy Lwt.return_nil) (lazy Lwt.return_nil)
+      genesis_discovery_time
      chain_store context_index genesis_hash >>=? fun genesis_block ->
    return @@
    Raw_net.build

======================================================================
@@ -258,8 +258,9 @@ module Valid_block : sig
     fitness: Protocol.fitness ;
     (** The (validated) score of the block. *)
     operations_hash: Operation_list_list_hash.t ;
-    operations: Operation_hash.t list list ;
-    (** The sequence of operations ans its (Merkle-)hash. *)
+    operation_hashes: Operation_hash.t list list Lwt.t Lazy.t ;
+    operations: Store.Operation.t list list Lwt.t Lazy.t ;
+    (** The sequence of operations and its (Merkle-)hash. *)
     discovery_time: Time.t ;
     (** The data at which the block was discorevered on the P2P network. *)
     protocol_hash: Protocol_hash.t ;

======================================================================
@@ -350,16 +350,18 @@ module Context_db = struct
           State.Valid_block.store net_state hash data >>=? function
           | None ->
               State.Valid_block.read net_state hash >>=? fun block ->
+              Lazy.force block.operation_hashes >>= fun ophs ->
               Lwt_list.iter_p
                 (Lwt_list.iter_p (fun hash ->
                      Distributed_db.Operation.commit net_db hash))
-                block.operations >>= fun () ->
+                ophs >>= fun () ->
               return (Ok block, false)
           | Some block ->
+              Lazy.force block.operation_hashes >>= fun ophs ->
               Lwt_list.iter_p
                 (Lwt_list.iter_p (fun hash ->
                      Distributed_db.Operation.commit net_db hash))
-                block.operations >>= fun () ->
+                ophs >>= fun () ->
               return (Ok block, true)
         end
       | Error err ->

======================================================================
@@ -22,7 +22,7 @@ type raw_operation = Store.Operation.t = {
   proto: MBytes.t ;
 }
 
-type shell_block = Store.Block_header.shell_header =
+type shell_block_header = Store.Block_header.shell_header =
   { net_id: Net_id.t ;
     level: Int32.t ;
     proto_level: int ; (* uint8 *)
@@ -32,8 +32,8 @@ type shell_block = Store.Block_header.shell_header =
     fitness: MBytes.t list ;
   }
 
-type raw_block = Store.Block_header.t = {
-  shell: shell_block ;
+type raw_block_header = Store.Block_header.t = {
+  shell: shell_block_header ;
   proto: MBytes.t ;
 }
@@ -44,10 +44,11 @@ type validation_result = {
 }
 
 type rpc_context = {
+  block_hash: Block_hash.t ;
+  block_header: raw_block_header ;
+  operation_hashes: unit -> Operation_hash.t list list Lwt.t ;
+  operations: unit -> raw_operation list list Lwt.t ;
   context: Context.t ;
-  level: Int32.t ;
-  timestamp: Time.t ;
-  fitness: Fitness.fitness ;
 }
 
 module type PROTOCOL = sig
@@ -70,13 +71,13 @@ module type PROTOCOL = sig
   val precheck_block :
     ancestor_context: Context.t ->
     ancestor_timestamp: Time.t ->
-    raw_block ->
+    raw_block_header ->
     unit tzresult Lwt.t
 
   val begin_application :
     predecessor_context: Context.t ->
     predecessor_timestamp: Time.t ->
     predecessor_fitness: Fitness.fitness ->
-    raw_block ->
+    raw_block_header ->
     validation_state tzresult Lwt.t
 
   val begin_construction :
     predecessor_context: Context.t ->

======================================================================
@@ -11,19 +11,6 @@ open Logging.Updater
 let (//) = Filename.concat
 
-type validation_result = Protocol.validation_result = {
-  context: Context.t ;
-  fitness: Fitness.fitness ;
-  message: string option ;
-}
-
-type rpc_context = Protocol.rpc_context = {
-  context: Context.t ;
-  level: Int32.t ;
-  timestamp: Time.t ;
-  fitness: Fitness.fitness ;
-}
-
 module type PROTOCOL = Protocol.PROTOCOL
 module type REGISTRED_PROTOCOL = sig
   val hash: Protocol_hash.t
@@ -43,7 +30,7 @@ type raw_operation = Store.Operation.t = {
 }
 let raw_operation_encoding = Store.Operation.encoding
 
-type shell_block = Store.Block_header.shell_header = {
+type shell_block_header = Store.Block_header.shell_header = {
   net_id: Net_id.t ;
   level: Int32.t ;
   proto_level: int ; (* uint8 *)
@@ -52,13 +39,27 @@ type shell_block = Store.Block_header.shell_header = {
   operations_hash: Operation_list_list_hash.t ;
   fitness: MBytes.t list ;
 }
-let shell_block_encoding = Store.Block_header.shell_header_encoding
+let shell_block_header_encoding = Store.Block_header.shell_header_encoding
 
-type raw_block = Store.Block_header.t = {
-  shell: shell_block ;
+type raw_block_header = Store.Block_header.t = {
+  shell: shell_block_header ;
   proto: MBytes.t ;
 }
-let raw_block_encoding = Store.Block_header.encoding
+let raw_block_header_encoding = Store.Block_header.encoding
+
+type validation_result = Protocol.validation_result = {
+  context: Context.t ;
+  fitness: Fitness.fitness ;
+  message: string option ;
+}
+
+type rpc_context = Protocol.rpc_context = {
+  block_hash: Block_hash.t ;
+  block_header: Protocol.raw_block_header ;
+  operation_hashes: unit -> Operation_hash.t list list Lwt.t ;
+  operations: unit -> raw_operation list list Lwt.t ;
+  context: Context.t ;
+}
 
 (** Version table *)

======================================================================
@@ -18,7 +18,7 @@ type raw_operation = Store.Operation.t = {
 }
 val raw_operation_encoding: raw_operation Data_encoding.t
 
-type shell_block = Store.Block_header.shell_header = {
+type shell_block_header = Store.Block_header.shell_header = {
   net_id: Net_id.t ;
   level: Int32.t ;
   proto_level: int ; (* uint8 *)
@@ -27,13 +27,13 @@ type shell_block = Store.Block_header.shell_header = {
   operations_hash: Operation_list_list_hash.t ;
   fitness: MBytes.t list ;
 }
-val shell_block_encoding: shell_block Data_encoding.t
+val shell_block_header_encoding: shell_block_header Data_encoding.t
 
-type raw_block = Store.Block_header.t = {
-  shell: shell_block ;
+type raw_block_header = Store.Block_header.t = {
+  shell: shell_block_header ;
   proto: MBytes.t ;
 }
-val raw_block_encoding: raw_block Data_encoding.t
+val raw_block_header_encoding: raw_block_header Data_encoding.t
 
 type validation_result = Protocol.validation_result = {
   context: Context.t ;
@@ -42,10 +42,11 @@ type validation_result = Protocol.validation_result = {
 }
 
 type rpc_context = Protocol.rpc_context = {
+  block_hash: Block_hash.t ;
+  block_header: raw_block_header ;
+  operation_hashes: unit -> Operation_hash.t list list Lwt.t ;
+  operations: unit -> raw_operation list list Lwt.t ;
   context: Context.t ;
-  level: Int32.t ;
-  timestamp: Time.t ;
-  fitness: Fitness.fitness ;
 }
 
 module type PROTOCOL = Protocol.PROTOCOL
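For reference, a hedged sketch of how a handler might consume the enriched rpc_context declared above; the field names come from this diff, while the helper and its use are illustrative only:

open Lwt.Infix

(* Illustrative only: read the predecessor hash out of the embedded block
   header and flatten the operation hashes returned by the new callback. *)
let summarize (ctxt : Updater.rpc_context) =
  ctxt.operation_hashes () >>= fun hashes ->
  Lwt.return (ctxt.block_header.shell.predecessor, List.concat hashes)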

======================================================================
@@ -13,7 +13,7 @@ open Tezos_hash
 (** Exported type *)
 type header = {
-  shell: Updater.shell_block ;
+  shell: Updater.shell_block_header ;
   proto: proto_header ;
   signature: Ed25519.Signature.t ;
 }
@@ -46,7 +46,7 @@ let signed_proto_header_encoding =
 let unsigned_header_encoding =
   let open Data_encoding in
   merge_objs
-    Updater.shell_block_encoding
+    Updater.shell_block_header_encoding
     proto_header_encoding
 
 (** Constants *)
@@ -64,7 +64,7 @@ type error +=
 let parse_header
     ({ shell = { net_id ; level ; proto_level ; predecessor ;
                  timestamp ; fitness ; operations_hash } ;
-       proto } : Updater.raw_block) : header tzresult =
+       proto } : Updater.raw_block_header) : header tzresult =
   match Data_encoding.Binary.of_bytes signed_proto_header_encoding proto with
   | None -> Error [Cant_parse_proto_header]
   | Some (proto, signature) ->

======================================================================
@@ -11,7 +11,7 @@ open Tezos_hash
 (** Exported type *)
 type header = {
-  shell: Updater.shell_block ;
+  shell: Updater.shell_block_header ;
   proto: proto_header ;
   signature: Ed25519.Signature.t ;
 }
@@ -26,16 +26,16 @@ and proto_header = {
 val max_header_length: int
 
 (** Parse the protocol-specific part of a block header. *)
-val parse_header: Updater.raw_block -> header tzresult
+val parse_header: Updater.raw_block_header -> header tzresult
 
 val proto_header_encoding:
   proto_header Data_encoding.encoding
 
 val unsigned_header_encoding:
-  (Updater.shell_block * proto_header) Data_encoding.encoding
+  (Updater.shell_block_header * proto_header) Data_encoding.encoding
 
 val forge_header:
-  Updater.shell_block -> proto_header -> MBytes.t
+  Updater.shell_block_header -> proto_header -> MBytes.t
 (** [forge_header shell_hdr proto_hdr] is the binary serialization
     (using [unsigned_header_encoding]) of a block header,
     comprising both the shell and the protocol part of the header,

======================================================================
@@ -321,6 +321,19 @@ end
 
 type error += Cannot_parse_operation
 
+let encoding =
+  let open Data_encoding in
+  conv
+    (fun { hash ; shell ; contents ; signature } ->
+       (hash, (shell, (contents, signature))))
+    (fun (hash, (shell, (contents, signature))) ->
+       { hash ; shell ; contents ; signature })
+    (merge_objs
+       (obj1 (req "hash" Operation_hash.encoding))
+       (merge_objs
+          Updater.shell_operation_encoding
+          Encoding.signed_proto_operation_encoding))
+
 let () =
   register_error_kind
     `Branch

======================================================================
@@ -83,6 +83,8 @@ and counter = Int32.t
 
 type error += Cannot_parse_operation (* `Branch *)
 
+val encoding: operation Data_encoding.t
+
 val parse:
   Operation_hash.t -> Updater.raw_operation -> operation tzresult

======================================================================
@@ -34,6 +34,15 @@ let wrap_tzerror encoding =
       (fun x -> Error x) ;
     ]
 
+let operations custom_root =
+  RPC.service
+    ~description: "All the operations of the block (parsed)."
+    ~input: empty
+    ~output: (wrap_tzerror @@
+              (list (list (dynamic_size Operation.encoding))))
+    RPC.Path.(custom_root / "operations")
+
 module Constants = struct
 
   let cycle_length custom_root =
@@ -592,7 +601,7 @@ module Helpers = struct
     let block custom_root =
       RPC.service
         ~description:"Parse a block"
-        ~input: Updater.raw_block_encoding
+        ~input: Updater.raw_block_header_encoding
         ~output: (wrap_tzerror Block.proto_header_encoding)
         RPC.Path.(custom_root / "helpers" / "parse" / "block" )

======================================================================
@@ -9,32 +9,65 @@
 
 open Tezos_context
 
-let rpc_init { Updater.context ; level ; timestamp ; fitness } =
-  Tezos_context.init ~level ~timestamp ~fitness context
+type rpc_context = {
+  block_hash: Block_hash.t ;
+  block_header: Updater.raw_block_header ;
+  operation_hashes: unit -> Operation_hash.t list list Lwt.t ;
+  operations: unit -> Updater.raw_operation list list Lwt.t ;
+  context: Tezos_context.t ;
+}
+
+let rpc_init
+    ({ block_hash ; block_header ;
+       operation_hashes ; operations ; context } : Updater.rpc_context) =
+  let level = Int32.succ block_header.shell.level in
+  let timestamp = block_header.shell.timestamp in
+  let fitness = block_header.shell.fitness in
+  Tezos_context.init ~level ~timestamp ~fitness context >>=? fun context ->
+  return { block_hash ; block_header ; operation_hashes ; operations ; context }
 
 let rpc_services = ref (RPC.empty : Updater.rpc_context RPC.directory)
 
-let register0 s f =
+let register0_fullctxt s f =
   rpc_services :=
     RPC.register !rpc_services (s RPC.Path.root)
      (fun ctxt () ->
        ( rpc_init ctxt >>=? fun ctxt ->
          f ctxt ) >>= RPC.Answer.return)
+let register0 s f = register0_fullctxt s (fun { context } -> f context)
 
-let register1 s f =
+let register1_fullctxt s f =
   rpc_services :=
    RPC.register !rpc_services (s RPC.Path.root)
      (fun ctxt arg ->
        ( rpc_init ctxt >>=? fun ctxt ->
          f ctxt arg ) >>= RPC.Answer.return)
+let register1 s f = register1_fullctxt s (fun { context } x -> f context x)
+let register1_noctxt s f =
+  rpc_services :=
+    RPC.register !rpc_services (s RPC.Path.root)
+      (fun _ arg -> f arg >>= RPC.Answer.return)
 
-let register2 s f =
+let register2_fullctxt s f =
  rpc_services :=
    RPC.register !rpc_services (s RPC.Path.root)
      (fun (ctxt, arg1) arg2 ->
        ( rpc_init ctxt >>=? fun ctxt ->
          f ctxt arg1 arg2 ) >>= RPC.Answer.return)
+let register2 s f = register2_fullctxt s (fun { context } x y -> f context x y)
 
-let register1_noctxt s f =
-  rpc_services :=
-    RPC.register !rpc_services (s RPC.Path.root)
-      (fun _ arg -> f arg >>= RPC.Answer.return)
+(*-- Operations --------------------------------------------------------------*)
+
+let () =
+  register0_fullctxt
+    Services.operations
+    (fun { operation_hashes ; operations } ->
+       operation_hashes () >>= fun operation_hashes ->
+       operations () >>= fun operations ->
+       map2_s
+         (map2_s (fun x y -> Lwt.return (Operation.parse x y)))
+         operation_hashes operations)
 
 (*-- Constants ---------------------------------------------------------------*)
 
@@ -146,7 +179,7 @@ let () =
  rpc_services :=
    RPC.register !rpc_services (s RPC.Path.root)
      (fun (ctxt, contract) arg ->
-         ( rpc_init ctxt >>=? fun ctxt ->
+         ( rpc_init ctxt >>=? fun { context = ctxt } ->
           Contract.exists ctxt contract >>=? function
           | true -> f ctxt contract arg
          | false -> raise Not_found ) >>= RPC.Answer.return) in
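The split keeps the familiar register0/register1/register2 shape for services that only need the protocol context, while the *_fullctxt variants expose block metadata and operations. A hedged sketch of the latter, where my_service stands for a hypothetical RPC service declared elsewhere:

(* Illustrative only: a full-context handler can reach the block hash and
   the raw operations in addition to the protocol context. *)
let () =
  register0_fullctxt my_service
    (fun { block_hash ; operations ; _ } ->
       operations () >>= fun ops ->
       return (block_hash, List.length (List.concat ops)))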

======================================================================
@@ -491,6 +491,8 @@ and counter = Int32.t
 
 module Operation : sig
 
+  val encoding: operation Data_encoding.t
+
   type error += Cannot_parse_operation (* `Branch *)
 
   val parse:
     Operation_hash.t -> Updater.raw_operation -> operation tzresult
@@ -517,7 +519,7 @@ end
 module Block : sig
 
   type header = {
-    shell: Updater.shell_block ;
+    shell: Updater.shell_block_header ;
     proto: proto_header ;
     signature: Ed25519.Signature.t ;
   }
@@ -530,16 +532,16 @@ module Block : sig
   val max_header_length: int
 
-  val parse_header: Updater.raw_block -> header tzresult
+  val parse_header: Updater.raw_block_header -> header tzresult
 
   val proto_header_encoding:
     proto_header Data_encoding.encoding
 
   val unsigned_header_encoding:
-    (Updater.shell_block * proto_header) Data_encoding.encoding
+    (Updater.shell_block_header * proto_header) Data_encoding.encoding
 
   val forge_header:
-    Updater.shell_block -> proto_header -> MBytes.t
+    Updater.shell_block_header -> proto_header -> MBytes.t
 
 end

======================================================================
@@ -88,6 +88,16 @@ val iter_s : ('a -> unit tzresult Lwt.t) -> 'a list -> unit tzresult Lwt.t
 
 (** A {!List.map} in the monad *)
 val map_s : ('a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t
+val map_p : ('a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t
+
+(** A {!List.map2} in the monad *)
+val map2 :
+  ('a -> 'b -> 'c tzresult) -> 'a list -> 'b list -> 'c list tzresult
+
+(** A {!List.map2} in the monad *)
+val map2_s :
+  ('a -> 'b -> 'c tzresult Lwt.t) -> 'a list -> 'b list ->
+  'c list tzresult Lwt.t
 
 (** A {!List.map_filter} in the monad *)
 val map_filter_s : ('a -> 'b option tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t
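map2_s is the combinator the new operations RPC uses to pair operation hashes with their parsed contents. A hedged usage sketch in the same error monad:

(* Illustrative only: pair two equally long lists inside the combined
   error/Lwt monad. *)
let sum_pairwise xs ys = map2_s (fun x y -> return (x + y)) xs ys
(* sum_pairwise : int list -> int list -> int list tzresult Lwt.t *)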

======================================================================
@@ -16,7 +16,7 @@ val raw_operation_encoding: raw_operation Data_encoding.t
 
 (** The version agnostic toplevel structure of blocks. *)
-type shell_block = {
+type shell_block_header = {
   net_id: Net_id.t ;
   (** The genesis of the chain this block belongs to. *)
   level: Int32.t ;
@@ -34,13 +34,13 @@ type shell_block = {
       of unsigned bytes. Ordered by length and then by contents
       lexicographically. *)
 }
-val shell_block_encoding: shell_block Data_encoding.t
+val shell_block_header_encoding: shell_block_header Data_encoding.t
 
-type raw_block = {
-  shell: shell_block ;
+type raw_block_header = {
+  shell: shell_block_header ;
   proto: MBytes.t ;
 }
-val raw_block_encoding: raw_block Data_encoding.t
+val raw_block_header_encoding: raw_block_header Data_encoding.t
 
 type validation_result = {
   context: Context.t ;
@@ -49,10 +49,11 @@ type validation_result = {
 }
 
 type rpc_context = {
+  block_hash: Block_hash.t ;
+  block_header: raw_block_header ;
+  operation_hashes: unit -> Operation_hash.t list list Lwt.t ;
+  operations: unit -> raw_operation list list Lwt.t ;
   context: Context.t ;
-  level: Int32.t ;
-  timestamp: Time.t ;
-  fitness: Fitness.fitness ;
 }
 
 (** This is the signature of a Tezos protocol implementation. It has
@@ -104,12 +105,12 @@ module type PROTOCOL = sig
   val precheck_block :
     ancestor_context: Context.t ->
     ancestor_timestamp: Time.t ->
-    raw_block ->
+    raw_block_header ->
     unit tzresult Lwt.t
 
   (** The first step in a block validation sequence. Initializes a
       validation context for validating a block. Takes as argument the
-      {!raw_block} to initialize the context for this block, patching
+      {!raw_block_header} to initialize the context for this block, patching
       the context resulting of the application of the predecessor
       block passed as parameter. The function {!precheck_block} may
      not have been called before [begin_application], so all the
@@ -118,12 +119,12 @@ module type PROTOCOL = sig
     predecessor_context: Context.t ->
     predecessor_timestamp: Time.t ->
     predecessor_fitness: Fitness.fitness ->
-    raw_block ->
+    raw_block_header ->
     validation_state tzresult Lwt.t
 
   (** Initializes a validation context for constructing a new block
      (as opposed to validating an existing block). Since there is no
-      {!raw_block} header available, the parts that it provides are
+      {!raw_block_header} header available, the parts that it provides are
     passed as arguments (predecessor block hash, context resulting
     of the application of the predecessor block, and timestamp). *)
   val begin_construction :

======================================================================
@@ -52,7 +52,7 @@ module Command = struct
   let forge shell command =
     Data_encoding.Binary.to_bytes
-      (Data_encoding.tup2 Updater.shell_block_encoding encoding)
+      (Data_encoding.tup2 Updater.shell_block_header_encoding encoding)
       (shell, command)
 
 end

======================================================================
@@ -39,7 +39,7 @@ let compare_operations _ _ = 0
 let max_number_of_operations = 0
 
 type block = {
-  shell: Updater.shell_block ;
+  shell: Updater.shell_block_header ;
   command: Data.Command.t ;
   signature: Ed25519.Signature.t ;
 }

======================================================================
@@ -106,7 +106,7 @@ module Mining : sig
   val mine_stamp :
     Client_proto_rpcs.block ->
     secret_key ->
-    Updater.shell_block ->
+    Updater.shell_block_header ->
     int ->
     Nonce_hash.t ->
     MBytes.t tzresult Lwt.t