Shell: move shell errors into shell services
Allows the client to display shell errors properly. Also adds some missing registrations, documentation and comments.
commit 1171340a9f
parent 08c430c78f
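For context: the pattern centralised by this commit is OCaml's extensible `error` type combined with a `register_error_kind` call that attaches an id, title, description, pretty-printer and encoding to each error constructor. The sketch below is a minimal, self-contained illustration of that declare-then-register pattern, not the real `Error_monad` API; the hand-rolled registry type and the simplified signatures are assumptions made only for this example.

(* Minimal sketch of the error-registration pattern, with a hand-rolled
   registry standing in for the real Error_monad (names are illustrative). *)

type error = ..

type error_info = {
  id : string ;
  title : string ;
  description : string ;
  pp : error -> string option ;   (* None when the error is not handled *)
}

let registry : error_info list ref = ref []

let register_error_kind ~id ~title ~description ~pp =
  registry := { id ; title ; description ; pp } :: !registry

(* An error declared and registered in the shared services library. *)
type error += Inactive_chain of string

let () =
  register_error_kind
    ~id:"node.validator.inactive_chain"
    ~title:"Inactive chain"
    ~description:"Attempted validation of a block from an inactive chain."
    ~pp:(function
        | Inactive_chain chain ->
            Some (Printf.sprintf
                    "Tried to validate a block from chain %s, \
                     that is not currently considered active." chain)
        | _ -> None)

(* A client linking only against the services library can now render
   shell errors instead of reporting an opaque, unregistered error. *)
let pp_error ppf err =
  match List.find_map (fun info -> info.pp err) !registry with
  | Some msg -> Format.pp_print_string ppf msg
  | None -> Format.pp_print_string ppf "Unregistered error"

let () =
  Format.printf "%a@." pp_error (Inactive_chain "some-chain-id")

The real registrations go further: each one also carries a `Data_encoding`, so the errors can be serialised and decoded over RPC, as the new `validation_errors.ml` at the end of this diff shows.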
@@ -16,6 +16,8 @@ and raw = Block_header.t * Block_hash.t list
 
 let raw x = x
 
+let pp ppf loc = Block_header.pp ppf (fst loc)
+
 let encoding =
 let open Data_encoding in
 (* TODO add a [description] *)
@@ -15,6 +15,8 @@ and raw = Block_header.t * Block_hash.t list
 
 val raw: t -> raw
 
+val pp : Format.formatter -> t -> unit
+
 val encoding: t Data_encoding.t
 
 val compute: predecessor: (Block_hash.t -> int -> Block_hash.t option Lwt.t) ->
@@ -9,7 +9,7 @@
 
 include Logging.Make(struct let name = "node.validator.bootstrap_pipeline" end)
 
-type error += Invalid_locator of P2p_peer.Id.t * Block_locator.t
+open Validation_errors
 
 type t = {
 canceler: Lwt_canceler.t ;
@@ -9,8 +9,6 @@
 
 type t
 
-type error += Invalid_locator of P2p_peer.Id.t * Block_locator.t
-
 val create:
 ?notify_new_block: (State.Block.t -> unit) ->
 block_header_timeout:float ->
@@ -149,7 +149,9 @@ end = struct
 type error += Timeout of key
 
 let () =
-Error_monad.register_error_kind `Permanent
+(* Missing data key *)
+register_error_kind
+`Permanent
 ~id: ("distributed_db." ^ Hash.name ^ ".missing")
 ~title: ("Missing " ^ Hash.name)
 ~description: ("Some " ^ Hash.name ^ " is missing from the distributed db")
@@ -158,6 +160,7 @@ end = struct
 (Data_encoding.obj1 (Data_encoding.req "key" Hash.encoding))
 (function Missing_data key -> Some key | _ -> None)
 (fun key -> Missing_data key) ;
+(* Canceled key *)
 register_error_kind
 `Permanent
 ~title: ("Canceled fetch of a " ^ Hash.name)
@@ -168,6 +171,7 @@ end = struct
 Data_encoding.(obj1 (req "key" Hash.encoding))
 (function (Canceled key) -> Some key | _ -> None)
 (fun key -> Canceled key) ;
+(* Timeout key *)
 register_error_kind
 `Permanent
 ~title: ("Timed out fetch of a " ^ Hash.name)
@@ -81,10 +81,6 @@ type t = Worker.dropbox Worker.t
 let debug w =
 Format.kasprintf (fun msg -> Worker.record_event w (Debug msg))
 
-type error +=
-| Unknown_ancestor
-| Known_invalid
-
 let set_bootstrapped pv =
 if not pv.bootstrapped then begin
 pv.bootstrapped <- true ;
@@ -194,7 +190,7 @@ let may_validate_new_head w hash header =
 "ignoring known invalid block %a from peer %a"
 Block_hash.pp_short hash
 P2p_peer.Id.pp_short pv.peer_id ;
-fail Known_invalid
+fail Validation_errors.Known_invalid
 end
 | false ->
 only_if_fitness_increases w header @@ fun () ->
@@ -211,7 +207,7 @@ let may_validate_new_branch w distant_hash locator =
 "ignoring branch %a without common ancestor from peer: %a."
 Block_hash.pp_short distant_hash
 P2p_peer.Id.pp_short pv.peer_id ;
-fail Unknown_ancestor
+fail Validation_errors.Unknown_ancestor
 | Some (ancestor, unknown_prefix) ->
 bootstrap_new_branch w ancestor distant_header unknown_prefix
 
@@ -246,8 +242,8 @@ let on_completion w r _ st =
 let on_error w r st errs =
 let pv = Worker.state w in
 match errs with
-((( Unknown_ancestor
-| Bootstrap_pipeline.Invalid_locator _
+((( Validation_errors.Unknown_ancestor
+| Validation_errors.Invalid_locator _
 | Block_validator_errors.Invalid_block _ ) :: _) as errors ) ->
 (* TODO ban the peer_id... *)
 debug w
@@ -8,6 +8,7 @@
 (**************************************************************************)
 
 open Preapply_result
+open Validation_errors
 
 let rec apply_operations apply_operation state r max_ops ~sort ops =
 Lwt_list.fold_left_s
@@ -96,32 +97,6 @@ let start_prevalidation
 return (State { proto = (module Proto) ; state ;
 max_number_of_operations ; max_operation_data_length })
 
-type error += Parse_error
-type error += Too_many_operations
-type error += Oversized_operation of { size: int ; max: int }
-
-let () =
-register_error_kind `Temporary
-~id:"prevalidation.too_many_operations"
-~title:"Too many pending operations in prevalidation"
-~description:"The prevalidation context is full."
-~pp:(fun ppf () ->
-Format.fprintf ppf "Too many operation in prevalidation context.")
-Data_encoding.empty
-(function Too_many_operations -> Some () | _ -> None)
-(fun () -> Too_many_operations) ;
-register_error_kind `Permanent
-~id:"prevalidation.oversized_operation"
-~title:"Oversized operation"
-~description:"The operation size is bigger than allowed."
-~pp:(fun ppf (size, max) ->
-Format.fprintf ppf "Oversized operation (size: %d, max: %d)"
-size max)
-Data_encoding.(obj2
-(req "size" int31)
-(req "max_size" int31))
-(function Oversized_operation { size ; max } -> Some (size, max) | _ -> None)
-(fun (size, max) -> Oversized_operation { size ; max })
 
 let prevalidate
 (State { proto = (module Proto) ; state ;
@@ -7,6 +7,8 @@
 (* *)
 (**************************************************************************)
 
+open Validation_errors
+
 include Logging.Make(struct let name = "node.validator.block" end)
 
 type 'a request =
@@ -26,56 +28,6 @@ type t = {
 
 (** Block validation *)
 
-type protocol_error =
-| Compilation_failed
-| Dynlinking_failed
-
-let protocol_error_encoding =
-let open Data_encoding in
-union
-[
-case (Tag 0)
-(obj1
-(req "error" (constant "compilation_failed")))
-(function Compilation_failed -> Some ()
-| _ -> None)
-(fun () -> Compilation_failed) ;
-case (Tag 1)
-(obj1
-(req "error" (constant "dynlinking_failed")))
-(function Dynlinking_failed -> Some ()
-| _ -> None)
-(fun () -> Dynlinking_failed) ;
-]
-
-let pp_protocol_error ppf = function
-| Compilation_failed ->
-Format.fprintf ppf "compilation error"
-| Dynlinking_failed ->
-Format.fprintf ppf "dynlinking error"
-
-type error +=
-| Invalid_protocol of { hash: Protocol_hash.t ; error: protocol_error }
-
-let () =
-Error_monad.register_error_kind
-`Permanent
-~id:"validator.invalid_protocol"
-~title:"Invalid protocol"
-~description:"Invalid protocol."
-~pp:begin fun ppf (protocol, error) ->
-Format.fprintf ppf
-"@[<v 2>Invalid protocol %a@ %a@]"
-Protocol_hash.pp_short protocol pp_protocol_error error
-end
-Data_encoding.(merge_objs
-(obj1 (req "invalid_protocol" Protocol_hash.encoding))
-protocol_error_encoding)
-(function Invalid_protocol { hash ; error } ->
-Some (hash, error) | _ -> None)
-(fun (hash, error) ->
-Invalid_protocol { hash ; error })
-
 let rec worker_loop bv =
 begin
 protect ~canceler:bv.canceler begin fun () ->
@@ -9,14 +9,6 @@
 
 type t
 
-type protocol_error =
-| Compilation_failed
-| Dynlinking_failed
-
-type error +=
-| Invalid_protocol of
-{ hash: Protocol_hash.t ; error: protocol_error }
-
 val create: Distributed_db.t -> t
 
 val validate:
@@ -8,51 +8,7 @@
 (**************************************************************************)
 
 open Logging.Node.State
+open Validation_errors
 
-type error +=
-| Unknown_chain of Chain_id.t
-
-type error += Bad_data_dir
-
-type error += Block_not_invalid of Block_hash.t
-
-let () =
-let open Error_monad in
-register_error_kind
-`Temporary
-~id:"state.unknown_chain"
-~title:"Unknown chain"
-~description:"TODO"
-~pp:(fun ppf id ->
-Format.fprintf ppf "Unknown chain %a" Chain_id.pp id)
-Data_encoding.(obj1 (req "chain" Chain_id.encoding))
-(function Unknown_chain x -> Some x | _ -> None)
-(fun x -> Unknown_chain x) ;
-register_error_kind
-`Permanent
-~id:"badDataDir"
-~title:"Bad data directory"
-~description:"The data directory could not be read. \
-This could be because it was generated with an \
-old version of the tezos-node program. \
-Deleting and regenerating this directory \
-may fix the problem."
-Data_encoding.empty
-(function Bad_data_dir -> Some () | _ -> None)
-(fun () -> Bad_data_dir) ;
-register_error_kind
-`Permanent
-~id:"blockNotInvalid"
-~title:"Block not invalid"
-~description:"The invalid block to be unmarked was not actually invalid."
-~pp:(fun ppf block ->
-Format.fprintf ppf "Block %a was expected to be invalid, but was not actually invalid."
-Block_hash.pp block)
-Data_encoding.(obj1 (req "block" Block_hash.encoding))
-(function Block_not_invalid block -> Some block | _ -> None)
-(fun block -> Block_not_invalid block) ;
-
-(** *)
 
 module Shared = struct
 type 'a t = {
@@ -545,28 +501,6 @@ module Block = struct
 Shared.use chain.block_store (fun store ->
 predecessor_n store b distance)
 
-
-type error += Inconsistent_hash of Context_hash.t * Context_hash.t
-
-let () =
-Error_monad.register_error_kind
-`Permanent
-~id:"inconsistentContextHash"
-~title:"Inconsistent commit hash"
-~description:
-"When commiting the context of a block, the announced context \
-hash was not the one computed at commit time."
-~pp: (fun ppf (got, exp) ->
-Format.fprintf ppf
-"@[<v 2>Inconsistant hash:@ got: %a@ expected: %a"
-Context_hash.pp got
-Context_hash.pp exp)
-Data_encoding.(obj2
-(req "wrong_context_hash" Context_hash.encoding)
-(req "expected_context_hash" Context_hash.encoding))
-(function Inconsistent_hash (got, exp) -> Some (got, exp) | _ -> None)
-(fun (got, exp) -> Inconsistent_hash (got, exp))
-
 let store
 ?(dont_enforce_context_hash = false)
 chain_state block_header operations
@@ -31,12 +31,6 @@ val read:
 val close:
 global_state -> unit Lwt.t
 
-(** {2 Errors} **************************************************************)
-
-type error +=
-| Unknown_chain of Chain_id.t
-
-
 (** {2 Network} ************************************************************)
 
 (** Data specific to a given chain (e.g the main chain or the current
@@ -105,8 +99,6 @@ module Block : sig
 val read_opt: Chain.t -> Block_hash.t -> block option Lwt.t
 val read_exn: Chain.t -> Block_hash.t -> block Lwt.t
 
-type error += Inconsistent_hash of Context_hash.t * Context_hash.t
-
 val store:
 ?dont_enforce_context_hash:bool ->
 Chain.t ->
@@ -7,6 +7,7 @@
 tezos-storage
 tezos-protocol-updater
 tezos-shell
+tezos-shell-services
 tezos-embedded-protocol-demo
 tezos-stdlib-unix
 alcotest-lwt))
@@ -16,6 +17,7 @@
 -open Tezos_storage
 -open Tezos_protocol_updater
 -open Tezos_shell
+-open Tezos_shell_services
 -open Tezos_stdlib_unix))))
 
 (alias
@@ -101,7 +101,7 @@ let build_valid_chain state vtbl pred names =
 return vblock
 end >>= function
 | Ok v -> Lwt.return v
-| Error [ State.Block.Inconsistent_hash (got, _) ] ->
+| Error [ Validation_errors.Inconsistent_hash (got, _) ] ->
 (* Kind of a hack, but at least it tests idempotence to some extent. *)
 attempt (Some got)
 | Error err ->
@@ -55,26 +55,9 @@ let activate v ?max_child_ttl chain_state =
 let get_exn { active_chains } chain_id =
 Chain_id.Table.find active_chains chain_id
 
-type error +=
-| Inactive_chain of Chain_id.t
-
-let () =
-register_error_kind `Branch
-~id: "node.validator.inactive_chain"
-~title: "Inactive chain"
-~description: "Attempted validation of a block from an inactive chain."
-~pp: (fun ppf chain ->
-Format.fprintf ppf
-"Tried to validate a block from chain %a, \
-that is not currently considered active."
-Chain_id.pp chain)
-Data_encoding.(obj1 (req "inactive_chain" Chain_id.encoding))
-(function Inactive_chain chain -> Some chain | _ -> None)
-(fun chain -> Inactive_chain chain)
-
 let get v chain_id =
 try get_exn v chain_id >>= fun nv -> return nv
-with Not_found -> fail (Inactive_chain chain_id)
+with Not_found -> fail (Validation_errors.Inactive_chain chain_id)
 
 let validate_block v ?(force = false) ?chain_id bytes operations =
 let hash = Block_hash.hash_bytes [bytes] in
@@ -27,8 +27,6 @@ val activate:
 ?max_child_ttl:int ->
 State.Chain.t -> Chain_validator.t Lwt.t
 
-type error +=
-| Inactive_chain of Chain_id.t
 val get: t -> Chain_id.t -> Chain_validator.t tzresult Lwt.t
 val get_exn: t -> Chain_id.t -> Chain_validator.t Lwt.t
 
src/lib_shell_services/validation_errors.ml (new file, 246 lines)
@@ -0,0 +1,246 @@
+(**************************************************************************)
+(* *)
+(* Copyright (c) 2014 - 2018. *)
+(* Dynamic Ledger Solutions, Inc. <contact@tezos.com> *)
+(* *)
+(* All rights reserved. No warranty, explicit or implicit, provided. *)
+(* *)
+(**************************************************************************)
+
+(***************** Prevalidation errors ***********************************)
+
+type error += Parse_error
+type error += Too_many_operations
+type error += Oversized_operation of { size: int ; max: int }
+
+let () =
+(* Parse error *)
+register_error_kind
+`Permanent
+~id:"prevalidation.parse_error"
+~title:"Parsing error in prevalidation"
+~description:"Raised when an operation has not been parsed correctly during prevalidation."
+~pp:(fun ppf () ->
+Format.fprintf ppf "Operation parsing error in prevalidation.")
+Data_encoding.empty
+(function Parse_error -> Some () | _ -> None)
+(fun () -> Parse_error) ;
+(* Too many operations *)
+register_error_kind
+`Temporary
+~id:"prevalidation.too_many_operations"
+~title:"Too many pending operations in prevalidation"
+~description:"The prevalidation context is full."
+~pp:(fun ppf () ->
+Format.fprintf ppf "Too many operations in prevalidation context.")
+Data_encoding.empty
+(function Too_many_operations -> Some () | _ -> None)
+(fun () -> Too_many_operations) ;
+(* Oversized operation *)
+register_error_kind
+`Permanent
+~id:"prevalidation.oversized_operation"
+~title:"Oversized operation"
+~description:"The operation size is bigger than allowed."
+~pp:(fun ppf (size, max) ->
+Format.fprintf ppf "Oversized operation (size: %d, max: %d)"
+size max)
+Data_encoding.(obj2
+(req "size" int31)
+(req "max_size" int31))
+(function Oversized_operation { size ; max } -> Some (size, max) | _ -> None)
+(fun (size, max) -> Oversized_operation { size ; max })
+
+
+(************************* State errors ***********************************)
+
+type error += Unknown_chain of Chain_id.t
+
+type error += Bad_data_dir
+
+type error += Block_not_invalid of Block_hash.t
+
+let () =
+(* Unknown network *)
+register_error_kind
+`Permanent
+~id:"state.unknown_chain"
+~title:"Unknown chain"
+~description:"The chain identifier could not be found in \
+the chain identifiers table."
+~pp:(fun ppf id ->
+Format.fprintf ppf "Unknown chain %a" Chain_id.pp id)
+Data_encoding.(obj1 (req "chain" Chain_id.encoding))
+(function Unknown_chain x -> Some x | _ -> None)
+(fun x -> Unknown_chain x) ;
+register_error_kind
+`Permanent
+~id:"badDataDir"
+~title:"Bad data directory"
+~description:"The data directory could not be read. \
+This could be because it was generated with an \
+old version of the tezos-node program. \
+Deleting and regenerating this directory \
+may fix the problem."
+~pp:(fun ppf () -> Format.fprintf ppf "Bad data directory.")
+Data_encoding.empty
+(function Bad_data_dir -> Some () | _ -> None)
+(fun () -> Bad_data_dir) ;
+(* Block not invalid *)
+register_error_kind
+`Permanent
+~id:"blockNotInvalid"
+~title:"Block not invalid"
+~description:"The invalid block to be unmarked was not actually invalid."
+~pp:(fun ppf block ->
+Format.fprintf ppf "Block %a was expected to be invalid, but was not actually invalid."
+Block_hash.pp block)
+Data_encoding.(obj1 (req "block" Block_hash.encoding))
+(function Block_not_invalid block -> Some block | _ -> None)
+(fun block -> Block_not_invalid block)
+
+(* Block database error *)
+
+type error += Inconsistent_hash of Context_hash.t * Context_hash.t
+
+let () =
+(* Inconsistent hash *)
+register_error_kind
+`Permanent
+~id:"inconsistentContextHash"
+~title:"Inconsistent commit hash"
+~description:
+"When commiting the context of a block, the announced context \
+hash was not the one computed at commit time."
+~pp: (fun ppf (got, exp) ->
+Format.fprintf ppf
+"@[<v 2>Inconsistant hash:@ got: %a@ expected: %a"
+Context_hash.pp got
+Context_hash.pp exp)
+Data_encoding.(obj2
+(req "wrong_context_hash" Context_hash.encoding)
+(req "expected_context_hash" Context_hash.encoding))
+(function Inconsistent_hash (got, exp) -> Some (got, exp) | _ -> None)
+(fun (got, exp) -> Inconsistent_hash (got, exp))
+
+(******************* Bootstrap pipeline errors ****************************)
+
+type error += Invalid_locator of P2p_peer.Id.t * Block_locator.t
+
+let () =
+(* Invalid locator *)
+register_error_kind
+`Permanent
+~id:"invalidLocator"
+~title:"Invalid block locator"
+~description:"Block locator is invalid."
+~pp: (fun ppf (id, locator) ->
+Format.fprintf ppf
+"Invalid block locator %a on peer %a"
+Block_locator.pp locator
+P2p_peer.Id.pp id)
+Data_encoding.(obj2
+(req "id" P2p_peer.Id.encoding)
+(req "locator" Block_locator.encoding))
+(function | Invalid_locator (id, loc) -> Some (id, loc) | _ -> None)
+(fun (id, loc) -> Invalid_locator (id, loc))
+
+(******************* Protocol validator errors ****************************)
+
+type protocol_error =
+| Compilation_failed
+| Dynlinking_failed
+
+type error += Invalid_protocol of { hash: Protocol_hash.t ; error: protocol_error }
+
+let protocol_error_encoding =
+let open Data_encoding in
+union
+[
+case (Tag 0)
+(obj1
+(req "error" (constant "compilation_failed")))
+(function Compilation_failed -> Some ()
+| _ -> None)
+(fun () -> Compilation_failed) ;
+case (Tag 1)
+(obj1
+(req "error" (constant "dynlinking_failed")))
+(function Dynlinking_failed -> Some ()
+| _ -> None)
+(fun () -> Dynlinking_failed) ;
+]
+
+let pp_protocol_error ppf = function
+| Compilation_failed ->
+Format.fprintf ppf "compilation error"
+| Dynlinking_failed ->
+Format.fprintf ppf "dynlinking error"
+
+let () =
+(* Invalid protocol *)
+register_error_kind
+`Permanent
+~id:"validator.invalid_protocol"
+~title:"Invalid protocol"
+~description:"Invalid protocol."
+~pp:begin fun ppf (protocol, error) ->
+Format.fprintf ppf
+"@[<v 2>Invalid protocol %a@ %a@]"
+Protocol_hash.pp_short protocol pp_protocol_error error
+end
+Data_encoding.(merge_objs
+(obj1 (req "invalid_protocol" Protocol_hash.encoding))
+protocol_error_encoding)
+(function Invalid_protocol { hash ; error } ->
+Some (hash, error) | _ -> None)
+(fun (hash, error) ->
+Invalid_protocol { hash ; error })
+
+(********************* Peer validator errors ******************************)
+
+type error +=
+| Unknown_ancestor
+| Known_invalid
+
+let () =
+(* Unknown ancestor *)
+register_error_kind
+`Permanent
+~id: "node.peer_validator.unknown_ancestor"
+~title: "Unknown ancestor"
+~description: "Unknown ancestor block found in the peer's chain"
+~pp: (fun ppf () -> Format.fprintf ppf "Unknown ancestor")
+Data_encoding.empty
+(function Unknown_ancestor -> Some () | _ -> None)
+(fun () -> Unknown_ancestor) ;
+(* Known invalid *)
+register_error_kind
+`Permanent
+~id: "node.peer_validator.known_invalid"
+~title: "Known invalid"
+~description: "Known invalid block found in the peer's chain"
+~pp: (fun ppf () -> Format.fprintf ppf "Known invalid")
+Data_encoding.empty
+(function Known_invalid -> Some () | _ -> None)
+(fun () -> Known_invalid)
+
+(************************ Validator errors ********************************)
+
+type error += Inactive_chain of Chain_id.t
+
+let () =
+(* Inactive network *)
+register_error_kind
+`Branch
+~id: "node.validator.inactive_chain"
+~title: "Inactive chain"
+~description: "Attempted validation of a block from an inactive chain."
+~pp: (fun ppf chain ->
+Format.fprintf ppf
+"Tried to validate a block from chain %a, \
+that is not currently considered active."
+Chain_id.pp chain)
+Data_encoding.(obj1 (req "inactive_chain" Chain_id.encoding))
+(function Inactive_chain chain -> Some chain | _ -> None)
+(fun chain -> Inactive_chain chain)