Shell: enforce maximum operation size
parent 84a2f1ee29
commit f9e6831363
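
The change below enforces two limits in the shell: a cap on the number of operations per validation pass and a cap on the serialized size of each operation. As a rough, standalone OCaml sketch of those two checks (illustrative names only, plain standard library rather than the node's modules):

(* Standalone sketch of the two limits enforced by this commit: a per-pass
   operation count and a per-operation byte size. Names are illustrative;
   the commit implements this with fail_unless and the shell's error types. *)
type limit_error =
  | Too_many_operations of { pass : int; found : int; max : int }
  | Oversized_operation of { size : int; max : int }

let check_pass ~pass ~max_ops ~max_size (ops : bytes list)
  : (unit, limit_error) result =
  let found = List.length ops in
  if found > max_ops then
    Error (Too_many_operations { pass; found; max = max_ops })
  else
    (* Reject the first operation whose encoded size exceeds the limit. *)
    let rec check = function
      | [] -> Ok ()
      | op :: rest ->
          let size = Bytes.length op in
          if size > max_size then
            Error (Oversized_operation { size; max = max_size })
          else check rest
    in
    check ops

let _ = check_pass ~pass:1 ~max_ops:300 ~max_size:(16 * 1024) []
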
@@ -51,6 +51,8 @@ type block_error =
       }
   | Unexpected_number_of_validation_passes of int (* uint8 *)
   | Too_many_operations of { pass: int; found: int; max: int }
+  | Oversized_operation of { operation: Operation_hash.t;
+                             size: int; max: int }

 let block_error_encoding =
   let open Data_encoding in
@@ -144,6 +146,18 @@ let block_error_encoding =
           | _ -> None)
         (fun ((), pass, found, max) ->
            Too_many_operations { pass ; found ; max }) ;
+      case
+        (obj4
+           (req "error" (constant "oversized_operation"))
+           (req "operation" Operation_hash.encoding)
+           (req "found" int31)
+           (req "max" int31))
+        (function
+          | Oversized_operation { operation ; size ; max } ->
+              Some ((), operation, size, max)
+          | _ -> None)
+        (fun ((), operation, size, max) ->
+           Oversized_operation { operation ; size ; max }) ;
     ]

 let pp_block_error ppf = function
@@ -200,6 +214,10 @@ let pp_block_error ppf = function
       Format.fprintf ppf
         "Too many operations in validation pass %d (found: %d, max: %d)"
         pass found max
+  | Oversized_operation { operation ; size ; max } ->
+      Format.fprintf ppf
+        "Oversized operation %a (size: %d, max: %d)"
+        Operation_hash.pp_short operation size max

 type error +=
   | Invalid_block of
@@ -345,7 +363,18 @@ let apply_block
       fail_unless
         (List.length ops <= max)
         (invalid_block hash @@
-         Too_many_operations { pass = i + 1 ; found = List.length ops ; max }))
+         Too_many_operations
+           { pass = i + 1 ; found = List.length ops ; max }) >>=? fun () ->
+      let max_size = State.Block.max_operation_data_length pred in
+      iter_p (fun op ->
+          let size = Data_encoding.Binary.length Operation.encoding op in
+          fail_unless
+            (size <= max_size)
+            (invalid_block hash @@
+             Oversized_operation
+               { operation = Operation.hash op ;
+                 size ; max = max_size })) ops >>=? fun () ->
+      return ())
     operations (State.Block.max_number_of_operations pred) >>=? fun () ->
   let operation_hashes = List.map (List.map Operation.hash) operations in
   check_liveness net_state pred hash operation_hashes operations >>=? fun () ->
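
The apply_block hunk above relies on the node's fail_unless and iter_p combinators over the tzresult/Lwt monad. A minimal sketch of the same control flow over Stdlib.result, assuming a sequential iter_p (the real one runs the checks concurrently and combines errors):

(* Sketch of the fail_unless / iter_p combinators used in apply_block,
   written over Stdlib.result rather than the node's Lwt + tzresult monad. *)
let fail_unless cond err = if cond then Ok () else Error err

(* Sequential approximation of iter_p: stops at the first error. *)
let rec iter_p f = function
  | [] -> Ok ()
  | x :: rest ->
      (match f x with
       | Ok () -> iter_p f rest
       | Error _ as e -> e)

(* Usage: reject any operation whose byte size exceeds max_size. *)
let check_sizes ~max_size ops =
  iter_p
    (fun op ->
       let size = Bytes.length op in
       fail_unless (size <= max_size) (`Oversized (size, max_size)))
    ops
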
@@ -27,6 +27,8 @@ type block_error =
       }
   | Unexpected_number_of_validation_passes of int (* uint8 *)
   | Too_many_operations of { pass: int; found: int; max: int }
+  | Oversized_operation of { operation: Operation_hash.t;
+                             size: int; max: int }

 type error +=
   | Invalid_block of
@@ -126,6 +126,7 @@ let rec create
     global_valid_block_input db net_state =
   let net_db = Distributed_db.activate db net_state in
   Prevalidator.create
+    ~max_operations:2000 (* FIXME temporary constant *)
    ~operation_timeout:timeout.operation net_db >>= fun prevalidator ->
  let valid_block_input = Watcher.create_input () in
  let new_head_input = Watcher.create_input () in
@@ -82,29 +82,29 @@ let empty_result =
     branch_refused = Operation_hash.Map.empty ;
     branch_delayed = Operation_hash.Map.empty }

-let rec apply_operations apply_operation state r ~sort ops =
+let rec apply_operations apply_operation state r max_ops ~sort ops =
   Lwt_list.fold_left_s
-    (fun (state, r) (hash, op, parsed_op) ->
-       apply_operation state parsed_op >>= function
+    (fun (state, max_ops, r) (hash, op, parsed_op) ->
+       apply_operation state max_ops op parsed_op >>= function
       | Ok state ->
           let applied = (hash, op) :: r.applied in
-          Lwt.return (state, { r with applied } )
+          Lwt.return (state, max_ops - 1, { r with applied })
       | Error errors ->
          match classify_errors errors with
          | `Branch ->
              let branch_refused =
                Operation_hash.Map.add hash (op, errors) r.branch_refused in
-             Lwt.return (state, { r with branch_refused })
+             Lwt.return (state, max_ops, { r with branch_refused })
          | `Permanent ->
              let refused =
                Operation_hash.Map.add hash (op, errors) r.refused in
-             Lwt.return (state, { r with refused })
+             Lwt.return (state, max_ops, { r with refused })
          | `Temporary ->
              let branch_delayed =
                Operation_hash.Map.add hash (op, errors) r.branch_delayed in
-             Lwt.return (state, { r with branch_delayed }))
-    (state, r)
-    ops >>= fun (state, r) ->
+             Lwt.return (state, max_ops, { r with branch_delayed }))
+    (state, max_ops, r)
+    ops >>= fun (state, max_ops, r) ->
   match r.applied with
   | _ :: _ when sort ->
       let rechecked_operations =
@@ -113,25 +113,38 @@ let rec apply_operations apply_operation state r ~sort ops =
          ops in
       let remaining = List.length rechecked_operations in
       if remaining = 0 || remaining = List.length ops then
-        Lwt.return (state, r)
+        Lwt.return (state, max_ops, r)
      else
-        apply_operations apply_operation state r ~sort rechecked_operations
+        apply_operations apply_operation state r max_ops ~sort rechecked_operations
  | _ ->
-      Lwt.return (state, r)
+      Lwt.return (state, max_ops, r)

 type prevalidation_state =
-    State : { proto : 'a proto ; state : 'a }
+    State : { proto : 'a proto ; state : 'a ;
+              max_number_of_operations : int ;
+              max_operation_data_length : int }
     -> prevalidation_state

 and 'a proto =
   (module State.Registred_protocol.T with type validation_state = 'a)

-let start_prevalidation ?proto_header ~predecessor ~timestamp () =
+let start_prevalidation
+    ?proto_header
+    ?max_number_of_operations
+    ~predecessor ~timestamp () =
   let { Block_header.shell =
           { fitness = predecessor_fitness ;
             timestamp = predecessor_timestamp ;
             level = predecessor_level } } =
     State.Block.header predecessor in
+  let max_number_of_operations =
+    match max_number_of_operations with
+    | Some max -> max
+    | None ->
+        try List.hd (State.Block.max_number_of_operations predecessor)
+        with _ -> 0 in
+  let max_operation_data_length =
+    State.Block.max_operation_data_length predecessor in
   State.Block.context predecessor >>= fun predecessor_context ->
   Context.get_protocol predecessor_context >>= fun protocol ->
   let predecessor = State.Block.hash predecessor in
@@ -158,12 +171,39 @@ let start_prevalidation ?proto_header ~predecessor ~timestamp () =
     ?proto_header
     ()
   >>=? fun state ->
-  return (State { proto = (module Proto) ; state })
+  return (State { proto = (module Proto) ; state ;
+                  max_number_of_operations ; max_operation_data_length })

 type error += Parse_error
+type error += Too_many_operations
+type error += Oversized_operation of { size: int ; max: int }
+
+let () =
+  register_error_kind `Temporary
+    ~id:"prevalidation.too_many_operations"
+    ~title:"Too many pending operations in prevalidation"
+    ~description:"The prevalidation context is full."
+    ~pp:(fun ppf () ->
+        Format.fprintf ppf "Too many operation in prevalidation context.")
+    Data_encoding.empty
+    (function Too_many_operations -> Some () | _ -> None)
+    (fun () -> Too_many_operations) ;
+  register_error_kind `Permanent
+    ~id:"prevalidation.oversized_operation"
+    ~title:"Oversized operation"
+    ~description:"The operation size is bigger than allowed."
+    ~pp:(fun ppf (size, max) ->
+        Format.fprintf ppf "Oversized operation (size: %d, max: %d)"
+          size max)
+    Data_encoding.(obj2
+                     (req "size" int31)
+                     (req "max_size" int31))
+    (function Oversized_operation { size ; max } -> Some (size, max) | _ -> None)
+    (fun (size, max) -> Oversized_operation { size ; max })

 let prevalidate
-    (State { proto = (module Proto) ; state })
+    (State { proto = (module Proto) ; state ;
+             max_number_of_operations ; max_operation_data_length })
     ~sort (ops : (Operation_hash.t * Operation.t) list)=
   let ops =
     List.map
@@ -185,9 +225,18 @@ let prevalidate
       let compare (_, _, op1) (_, _, op2) = Proto.compare_operations op1 op2 in
       List.sort compare parsed_ops
     else parsed_ops in
+  let apply_operation state max_ops op parse_op =
+    let size = Data_encoding.Binary.length Operation.encoding op in
+    if max_ops <= 0 then
+      fail Too_many_operations
+    else if size > max_operation_data_length then
+      fail (Oversized_operation { size ; max = max_operation_data_length })
+    else
+      Proto.apply_operation state parse_op in
   apply_operations
-    Proto.apply_operation
-    state empty_result ~sort sorted_ops >>= fun (state, r) ->
+    apply_operation
+    state empty_result max_number_of_operations
+    ~sort sorted_ops >>= fun (state, max_number_of_operations, r) ->
   let r =
     { r with
       applied = List.rev r.applied ;
@@ -195,7 +244,9 @@ let prevalidate
         List.fold_left
           (fun map (h, op, err) -> Operation_hash.Map.add h (op, err) map)
          r.branch_refused invalid_ops } in
-  Lwt.return (State { proto = (module Proto) ; state }, r)
+  Lwt.return (State { proto = (module Proto) ; state ;
+                      max_number_of_operations ; max_operation_data_length },
+              r)

 let end_prevalidation (State { proto = (module Proto) ; state }) =
   Proto.finalize_block state
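
The prevalidation hunks above thread a remaining-operations budget (max_ops) through the accumulator of the fold and decrement it only when an operation is actually applied. A stripped-down sketch of that threading pattern, with plain lists instead of the Lwt fold and hypothetical callback names:

(* Sketch of how apply_operations threads a remaining-operations budget:
   the budget shrinks only when an operation is applied, and the
   per-operation callback can consult it. *)
let apply_operations apply_operation state max_ops ops =
  List.fold_left
    (fun (state, max_ops, applied) op ->
       match apply_operation state max_ops op with
       | Ok state -> (state, max_ops - 1, op :: applied)
       | Error _ -> (state, max_ops, applied))
    (state, max_ops, []) ops

(* The callback rejects once the budget is exhausted, mirroring the
   max_ops <= 0 check added to prevalidate. *)
let apply_operation state max_ops (_op : string) =
  if max_ops <= 0 then Error `Too_many_operations else Ok state

let _ = apply_operations apply_operation 0 3 ["op1"; "op2"]
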
@@ -30,6 +30,7 @@ type prevalidation_state

 val start_prevalidation :
   ?proto_header: MBytes.t ->
+  ?max_number_of_operations: int ->
   predecessor: State.Block.t ->
   timestamp: Time.t ->
   unit -> prevalidation_state tzresult Lwt.t
@@ -74,6 +74,7 @@ let merge _key a b =
   | _, Some y -> Some y

 let create
+    ~max_operations
     ~operation_timeout
     net_db =

@@ -84,11 +85,18 @@ let create

   Chain.head net_state >>= fun head ->
   let timestamp = ref (Time.now ()) in
-  (start_prevalidation ~predecessor:head ~timestamp:!timestamp () >|= ref) >>= fun validation_state ->
+  let max_number_of_operations =
+    try 2 * List.hd (State.Block.max_number_of_operations head)
+    with _ -> 0 in
+  (start_prevalidation
+     ~max_number_of_operations
+     ~predecessor:head
+     ~timestamp:!timestamp () >|= ref) >>= fun validation_state ->
   let pending = Operation_hash.Table.create 53 in
   let head = ref head in
   let mempool = ref Mempool.empty in
   let operations = ref empty_result in
+  let operation_count = ref 0 in (* unprocessed + operations/mempool *)
   Chain_traversal.live_blocks
     !head
     (State.Block.max_operations_ttl !head)
@@ -150,9 +158,13 @@ let create
       (fun (h, op) ->
          if Block_hash.Set.mem op.Operation.shell.branch !live_blocks then
            Lwt.return_some (h, op)
-         else
-           Lwt.return_none)
+         else begin
+           Distributed_db.Operation.clear_or_cancel net_db h ;
+           Lwt.return_none
+         end)
      (Operation_hash.Map.bindings ops) >>= fun rops ->
+    operation_count :=
+      !operation_count - Operation_hash.Map.cardinal ops + List.length rops ;
    match !validation_state with
    | Ok validation_state ->
        prevalidate validation_state ~sort:true rops >>= fun (state, r) ->
@@ -198,9 +210,8 @@ let create
          ~head:(State.Block.hash !head) !mempool >>= fun () ->
        if broadcast then broadcast_new_operations r ;
        Lwt_list.iter_s
-          (fun (_op, _exns) ->
-             (* FIXME *)
-             (* Distributed_db.Operation.mark_invalid net_db op exns >>= fun _ -> *)
+          (fun (op, _exns) ->
+             Distributed_db.Operation.clear_or_cancel net_db op ;
             Lwt.return_unit)
          (Operation_hash.Map.bindings r.refused) >>= fun () ->
        (* TODO. Keep a bounded set of 'refused' operations. *)
@@ -237,6 +248,7 @@ let create
        prevalidate validation_state
          ~sort:true rops >>= fun (state, res) ->
        let register h op =
+          incr operation_count ;
          live_operations :=
            Operation_hash.Set.add h !live_operations ;
          Distributed_db.inject_operation
@@ -286,6 +298,8 @@ let create
            Lwt.wakeup w result ;
            Lwt.return_unit
        end
+    | `Register (_gid, _mempool) when !operation_count >= max_operations ->
+        Lwt.return_unit
    | `Register (gid, mempool) ->
        let ops =
          Operation_hash.Set.elements mempool.Mempool.pending @
@@ -326,10 +340,16 @@ let create
        Lwt.return_unit
    | `Handle (h, op) ->
        Operation_hash.Table.remove pending h ;
-        broadcast_unprocessed := true ;
-        unprocessed := Operation_hash.Map.singleton h op ;
-        lwt_debug "register %a" Operation_hash.pp_short h >>= fun () ->
-        Lwt.return_unit
+        if !operation_count < max_operations then begin
+          broadcast_unprocessed := true ;
+          incr operation_count ;
+          unprocessed := Operation_hash.Map.singleton h op ;
+          lwt_debug "register %a" Operation_hash.pp_short h >>= fun () ->
+          Lwt.return_unit
+        end else begin
+          Distributed_db.Operation.clear_or_cancel net_db h ;
+          Lwt.return_unit
+        end
    | `Flush (new_head : State.Block.t) ->
        list_pendings
          ~maintain_net_db:net_db
@@ -348,6 +368,7 @@ let create
        operations := empty_result ;
        broadcast_unprocessed := false ;
        unprocessed := new_mempool ;
+        operation_count := Operation_hash.Map.cardinal new_mempool ;
        timestamp := Time.now () ;
        live_blocks := new_live_blocks ;
        live_operations := new_live_operations ;
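
The prevalidator hunks above gate admission on a mutable operation_count: new operations are registered only while the count stays below max_operations, otherwise they are dropped from the distributed DB, and the count is reset when the mempool is flushed. A small standalone sketch of that gating, with register/drop standing in for the node's actual handlers:

(* Sketch of the admission gating added to the prevalidator worker. *)
let max_operations = 2000            (* mirrors the FIXME temporary constant *)
let operation_count = ref 0

let handle_operation register drop op =
  if !operation_count < max_operations then begin
    incr operation_count ;
    register op
  end else
    drop op                          (* e.g. clear_or_cancel in the node *)

let flush new_pending_count =
  operation_count := new_pending_count
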
@@ -32,6 +32,7 @@ type t

 (** Creation and destruction of a "prevalidation" worker. *)
 val create:
+  max_operations: int ->
   operation_timeout: float ->
   Distributed_db.net_db -> t Lwt.t
 val shutdown: t -> unit Lwt.t
@@ -9,7 +9,6 @@

 let version_number = "\000"

-let max_operation_data_length = 16 * 1024
 let proof_of_work_nonce_size = 8
 let nonce_length = 32

@@ -44,6 +43,7 @@ type constants = {
   bootstrap_keys: Ed25519.Public_key.t list ;
   dictator_pubkey: Ed25519.Public_key.t ;
   max_number_of_operations: int list ;
+  max_operation_data_length: int ;
 }

 let read_public_key s =
@@ -76,6 +76,8 @@ let default = {
     "4d5373455738070434f214826d301a1c206780d7f789fcbf94c2149b2e0718cc" ;
   max_number_of_operations =
     [ 300 ] ;
+  max_operation_data_length =
+    16 * 1024 ; (* 16kB *)
 }

 let opt (=) def v = if def = v then None else Some v
@@ -127,6 +129,9 @@ let constants_encoding =
     and max_number_of_operations =
       opt CompareListInt.(=)
         default.max_number_of_operations c.max_number_of_operations
+    and max_operation_data_length =
+      opt Compare.Int.(=)
+        default.max_operation_data_length c.max_operation_data_length
    in
    ((( cycle_length,
        voting_period_length,
@@ -138,7 +143,8 @@ let constants_encoding =
        proof_of_work_threshold,
        bootstrap_keys,
        dictator_pubkey),
-      max_number_of_operations), ()) )
+      (max_number_of_operations,
+       max_operation_data_length)), ()) )
   (fun ((( cycle_length,
            voting_period_length,
            time_before_reward,
@@ -149,7 +155,8 @@ let constants_encoding =
           proof_of_work_threshold,
           bootstrap_keys,
           dictator_pubkey),
-         max_number_of_operations), ()) ->
+         (max_number_of_operations,
+          max_operation_data_length)), ()) ->
     { cycle_length =
        unopt default.cycle_length cycle_length ;
      voting_period_length =
@@ -174,6 +181,8 @@ let constants_encoding =
        unopt default.dictator_pubkey dictator_pubkey ;
      max_number_of_operations =
        unopt default.max_number_of_operations max_number_of_operations ;
+      max_operation_data_length =
+        unopt default.max_operation_data_length max_operation_data_length ;
    } )
  Data_encoding.(
    merge_objs
@@ -189,8 +198,10 @@ let constants_encoding =
        (opt "proof_of_work_threshold" int64)
        (opt "bootstrap_keys" (list Ed25519.Public_key.encoding))
        (opt "dictator_pubkey" Ed25519.Public_key.encoding))
-      (obj1
-        (opt "max_number_of_operations" (list uint16))))
+      (obj2
+        (opt "max_number_of_operations" (list uint16))
+        (opt "max_number_of_operations" int31)
+      ))
     unit)

 type error += Constant_read of exn
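
constants_encoding only serializes a constant when it differs from the compiled-in default, via the opt helper visible above, and unopt restores the default on decoding. A self-contained sketch of that pattern applied to the new max_operation_data_length constant (16 * 1024 is the default taken from the hunk above; the helper names mirror the diff, except that equal is passed explicitly instead of shadowing (=)):

(* Sketch of the opt/unopt default-elision pattern from constants_encoding. *)
let opt equal default v = if equal default v then None else Some v
let unopt default = function None -> default | Some v -> v

let default_max_operation_data_length = 16 * 1024

let encode_max_len v = opt ( = ) default_max_operation_data_length v
let decode_max_len o = unopt default_max_operation_data_length o

let () =
  assert (encode_max_len (16 * 1024) = None) ;     (* default value: omitted *)
  assert (decode_max_len (Some 32_768) = 32_768)   (* explicit override kept *)
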
@@ -327,7 +327,6 @@ module Encoding = struct
 end

 type error += Cannot_parse_operation
-type error += Operation_exceeds_max_length of int

 let encoding =
   let open Data_encoding in
@@ -353,30 +352,15 @@ let () =
       Format.fprintf ppf "The operation cannot be parsed")
     Data_encoding.unit
     (function Cannot_parse_operation -> Some () | _ -> None)
-    (fun () -> Cannot_parse_operation) ;
-  register_error_kind
-    `Branch
-    ~id:"operationExceedsMaxLength"
-    ~title:"Operation exceeded maximum allowed operation length"
-    ~description:"The operation exceeded the maximum allowed length of an operation."
-    ~pp:(fun ppf len ->
-        Format.fprintf ppf
-          "The operation was %d bytes, but operations must be less than %d bytes."
-          len Constants_repr.max_operation_data_length)
-    Data_encoding.(obj1 (req "length" int31))
-    (function Operation_exceeds_max_length len -> Some len | _ -> None)
-    (fun len -> Operation_exceeds_max_length len)
+    (fun () -> Cannot_parse_operation)

 let parse hash (op: Operation.t) =
-  if not (Compare.Int.(MBytes.length op.proto <= Constants_repr.max_operation_data_length)) then
-    error (Operation_exceeds_max_length (MBytes.length op.proto))
-  else
-    match Data_encoding.Binary.of_bytes
-            Encoding.signed_proto_operation_encoding
-            op.proto with
-    | Some (contents, signature) ->
-        ok { hash ; shell = op.shell ; contents ; signature }
-    | None -> error Cannot_parse_operation
+  match Data_encoding.Binary.of_bytes
+          Encoding.signed_proto_operation_encoding
+          op.proto with
+  | Some (contents, signature) ->
+      ok { hash ; shell = op.shell ; contents ; signature }
+  | None -> error Cannot_parse_operation

 type error += Invalid_signature (* `Permanent *)
 type error += Missing_signature (* `Permanent *)
@@ -429,5 +413,3 @@ let parse_proto bytes =
   | None -> fail Cannot_parse_operation

 include Encoding
-
-let max_operation_data_length = Constants_repr.max_operation_data_length
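
With the length check removed here, the protocol's parse only decodes the operation bytes, while the shell rejects oversized operations before they reach the protocol. A hedged sketch of that split, where decode_contents is a stand-in for Data_encoding.Binary.of_bytes with the signed operation encoding:

(* Sketch of the protocol/shell split introduced by this commit:
   parse only decodes, the caller enforces the size limit first. *)
let decode_contents (proto : bytes) : string option =
  if Bytes.length proto = 0 then None else Some (Bytes.to_string proto)

let parse proto =
  match decode_contents proto with
  | Some contents -> Ok contents
  | None -> Error `Cannot_parse_operation

let shell_accepts ~max_size proto =
  if Bytes.length proto > max_size then Error `Oversized_operation
  else parse proto
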
@@ -112,5 +112,3 @@ val proto_operation_encoding:

 val unsigned_operation_encoding:
   (Operation.shell_header * proto_operation) Data_encoding.t
-
-val max_operation_data_length: int
@@ -88,6 +88,9 @@ module Constants = struct
   let max_number_of_operations c =
     let constants = Raw_context.constants c in
     constants.max_number_of_operations
+  let max_operation_data_length c =
+    let constants = Raw_context.constants c in
+    constants.max_operation_data_length
 end

 module Delegates_pubkey = Public_key_storage
@@ -130,7 +133,7 @@ let finalize ?commit_message:message c =
   let context = Raw_context.recover c in
   let constants = Raw_context.constants c in
   { Updater.context ; fitness ; message ; max_operations_ttl = 60 ;
-    max_operation_data_length = 0 ;
+    max_operation_data_length = constants.max_operation_data_length ;
     max_number_of_operations = constants.max_number_of_operations ;
   }

@@ -279,6 +279,7 @@ module Constants : sig
   val proof_of_work_threshold: context -> int64
   val dictator_pubkey: context -> Ed25519.Public_key.t
   val max_number_of_operations: context -> int list
+  val max_operation_data_length: context -> int

 end

@@ -626,8 +627,6 @@ module Operation : sig
   val unsigned_operation_encoding:
     (Operation.shell_header * proto_operation) Data_encoding.t

-  val max_operation_data_length: int
-
 end

 module Block_header : sig